[Binary artifact, not prose: a tar archive of Zuul CI job output rendered as raw bytes. The only information recoverable from the tar headers is the member listing:
  var/home/core/zuul-output/                      (directory, mode 0755, owner core:core)
  var/home/core/zuul-output/logs/                 (directory, mode 0755, owner core:core)
  var/home/core/zuul-output/logs/kubelet.log.gz   (gzip-compressed kubelet log)
The remainder of the dump is the gzip-compressed payload of kubelet.log.gz; its contents cannot be reconstructed from this text.]
ԛ KieW"C=)#`2"WHT )0GR^y$W̕Fz3^ { 䪏rx$W< "оҞԃf rC2VWw_ o p?S'ˮRU/ʂ*`c eBZɺ.WH WOE?jQ]5'LTҊ 7T2H}*jSͤIXI\!Pj\18U\ BZ&.WHY6UkkHX\K/r'АR >ʕBV㬼i!S=V_1P̢+aV '[B}HK;?:iFSjF'z-R ;U .ӟx 3+BZC.WpQ!\iͣzC jo+=,J[A䪇re?N*Wl7rV"W@+i+d̚>gD}+V\!BA\њ#JѧYS[nOjPB`c+ĵZAe )䪇r T{$W*vo qEVu3 rCk½9YƸ>Q\"gT4,謖FVD;~`G3V*,ρ 5M/FN/ẀO J3SmFkO4ވRuE\kzJ#B`-+ĵZA:/WH W=+F #`Δ7r"WHt )䪇r'` "P*uBJ=t(Wl!jfsP]Nmo]oe !7Kyy{˯->nsn,*cYъ;owj}+zQ)YNؔ(Z[S1C#q TJ 8hU3ui%")bF .s |d~~PKʅ<'RH5/CĤbabP(nb8*#We%!8/__.fRl. 5Cu}gWW? ޹_o|ziWk'&Z(ߓ;z:\Y_ם`q8*b\xzMJv^Fs(K XU3+$.Jmah1 )N©PᾯgB4ұ:ΤՌq KG&.u̵"Mb%63bHj#Zd\%i*HJS\, ͙ę61¥ gY$hqk872^ 7Qh:*1%wC7;7-Ħtno0__)2#Ԉ2⩪!&{~y4XNw 5M7loޝ쳀1U{-`j-lM^0O"KMo&Svcgsžt 믣 Coپb >׋.ezDtl`w~~N _6ғkmMh$v\Ae1W.!.9;s2(+CbB&~zX0ll)V"Å4,FZ&P$2m/BU0F%ZLdREwIcccc>ӭKՕ۞,we=Gث)Lfl$YA'Z!ф0:,u <@#!"Z?uBP* UXǹ`˘fNX fE4D-ǟ<v&,dce,$ A'ma:Y>q^sP4;w$}Hx=zN'q*)/s$YB3(kqCC&6UYjD;df%db9мLDp-R ࡝ب423TQGeĥ,YƵKҒ$8L"EIFI(MZmgx]gSȎc4a`%?A@FqYr;k f;տ@3+}n?F[(#$)SQ(d̈qzb1)$3B@'EJ#Ufofɲ .Ju >qdH8Q<B.[ Kʸ " Dlugj{ɍ_Kr@÷" f ]ں%$_ꗖ52e3=m6ɮ*>OXCjP+G=OyZyzW .c0hI$B(A @$@ 9 yIQJ#2 G1Kg611Oح{9H`ƴL:xfoOl2x^^r$!xe5:$1R6)K!BJ ddx$2@Lmp2]#B}ӋUCOWV>T=#y.*Od~,}|*hZ'0ds<$!^=5aru{*QMBSY;̷5;??Ҵ4[HdRۥA)-((>䕂MQ?dyrmO"փd8 :fbZ!E31F!euKE:[=aOtgs:⠾?̚qIk2(p:)CY@]A,R&VE&%,GnS^qcp[T")%;+fBe[;{iO(z)22 qo'9>R-+B_PGfD8B`S. ^[чǘb f4 ې$Hx.&'yRS#*.1 CeE֝=hy9oL inz{R䴚DNgϧWa))EI&)NHW*餹 C &-!e*PTeX;O[ěDړ]5K:%_g5)9.be]N>I-h7*Rđ22HiEwe'22C-Dj..O-8csG0agngtٮ{9m7ukFц8G?R-꧉^d4ʼn64\%GTnClBWH.(G  u<('?{dCx %h=rmdhi)5^q'i9 xPd)G C(iWFqD<0#Zfg!6.xkfe *dт&&<D4voL[y.,ImBy-*ٙW/ ަ2Wo|أAG%r8:R=io (e"d_dQrRFs& },uQҚp@٪ou!ŀ&`f( `JyV>s̓MدUL6Λ77_4OWNAYJ`N"*T^jDPFRgI:^y( r4-iݾ*A|isa9۞߼jj0L7SӴt[iZzkĩ4-.l4fjd˜cНiEMp§~hE1kelf7YXsS0˱1/9bYφo qKqn? >tj]i:[tEw:.~t'4KXp:!El.)> J-z"s21qCgߦIn Wx7OnHcq{n^؍#}f]c{OVw@fh4:ZV= ;bW"Je4 5nD󒦍Ċ $| =z/ݥ_zKv"A&*n4 @+'%r% T$RAC1 Kծ%ԃ.E6fuVY+A=_aa2WV\Z΁73̫;f4o4+<畠^?"5d·]nkFwV1͕o7 #<[o4< B^/G|&)(Y()aA 9pr[f'lX9RX;{/W4Z!ܫ4uyw8 ʺPw[×m}nx*jfő\ϳCX4勢hȜ65^S{z0DŽȢ1sFK@,SΣcIRy6Yk!Z*]ૈѯ8ݼl͂M/Wucɝ]~Жj\B.~u\0^Knvfqi:dz']tߝ6Hw6t*C-on~e-oݹ[ɎVWws͘R;QeO4]Ggިw[j[-qay{sKt=I;-sps͔Һ:7_WEbwl!eA>Y 5>Cn61˒ ;xݪħcyaKq}¥|t;03_[\ "aLȒnVt! rcDZʕP1=E MmPޡC/%$khYXevrjŰ2lUdnrMpMɽ p2 `=GcLOZ)%w(eQ%89$-Y0h{u:atƀAPGBhsC)BK<}>Uv8-絧 贔Ջ>(b<'fŕ1u"" e]I!і^&`LW8KK-M(VH\eImDL12E4WX`S2r[b߭&[pju"ܣ82:|l` sYqh,,2&t%AjgO_.L`.'~ 5Bh>ڜpsřX|"i>ܴSNDdy>:̮\H]ZNՕIc۽>螱X$Sd{uƑ@qO1L]s2,]lNf,l5KNlK\3f[T~&5ik Tx3zc);RԜ}CiÒI_[I_z/i4bJHmHMFiG P ls? rR7ܨl[b |P›=R?7}ΐ-tI+SSRhJ9[6Hs栲,hd=-x} utwwj|LzV`AG7S!i&{=3}%f &GX\X-ɡL#Ӂs/F]aS2xN  lBKAVŒ&ѢRiMN.1Z˘J7LI}d0֐-/ͱ (AUu 3x+ں{> oj%QB":(l}L"Y|M6; &O HKli$9zmKHoj9dݦđMϵ::~vZ~~ZCՠH5y.vNnl+>E4kK`⣋yA7͹Z$TkW'*8Xv[w uٗC‹ sφrfS .>6)aW4rңԪ A˗ocla Z2{xb#LdBlsjIh-1VmuKw%&gl1:Ng+o*ܭng3wO{A|t1*5tOv{L[4Jįz}{D8_ C3Qc-TƎF +k5Ko=,fV[qdng&98: |j!ƶQaMtՀ7A(AweOKhі*yfAA׋~i9٩3f\u69ŴfRBS-`E7ɛ=zZLma)[՛ \U39@|꠷O7V>uݵV }-S\Xpl`0~Rō=, w/s)b?W[奎ߘ? 
Z`\jrE Y vQPvdp_d-Yfgqjvh.V3njuM+VM翰 cYݴ,KTGdJdoRkֳW{[ִEaV]|z3n5ݳe>;[ym̓9;gŘoʦi?z|?[ٻo>yQy|r7yit-EO.Ͽw)jÓsc>~ke}3n;y=|lx_ZS{q?"`Qʋo7LvsKo.NəodXOɢz6Nv1tΰ/׾0r9?pn/_9\`t+ZzŸF Ee~P0UuCw[L*r}zvniי/oXa:_\~q_N:*ǻ>nܪS#ߖes]׹nR]clJ؞_|CGܭ=_~_j~u{6iO}]tbnuBNcv:>K7Y}eg);9wa1}}Z:s=>񜵳<~=MX )-*sFZ,& fV7M#Oo=LfXX`I&{ 56]/FuTu|a$c Gvnz@ϷT'|^.np{N :3/|X`FܼP;߾هqaG_JJ_/,~ qQjlvK}ۮ'^6?z}F}킾-Fv5Jl>caܳ eћ*ɼm3qKSձǜ'҃,U)%ֈm @>7͜-zB)r[ۣP&~Zx}dn6P̏D1 zVzEbOD 3mlWӶ'dSN9䚽z^~䲭lSR¢\ST{I_A>* g̦(}Q:8vͯuME˫U8>:88 G_x]ث5BQ^+6#)K?#8 thX:4ZgC82$gWv =ʞWe VէW|˩'.nڏ̮f)l\7pu;2p4F,,E /]rYֿna K[3ۯo:c߲h=h0q0$9wqqGe\3+ 4.g-rXdķ[|sr6Wq .}ʊ !=}۞+tI+]5:l뙺j 0Ȫ3hEYy\8|@|rzWGO3S0 AyVWzͭXz0jh=BA8RZԋ]=O{zU2b=K{HO@Տg{/6ח`{!Š4zGSsag5ۥ5TbA7gk<){})5g4HB*)j\0-RZQg2{4w0w؂y&s`-WZ =K&6VZdRM[S̒*L7 ͱJג]\C6LZM.grŨPzʗzgօ!DsĮIq>s#_1=g$\˅k5A#1@fp2Uk-+rZ Qc 7x4}|tI#jq=:yMVKvL os7CJU&QnLKṕ`IHP9 LaԇE鞠NlRLE ;\i@`+%s-R>Kh?cBȷIhDP4v?Ji=2p4)m]Nj$E~nu>[iN_zxH_apq ,H6T IcO͚ʼnƒH0ޚnB3dR^ "Ĭ/2褱0ȒW^A E+u i)]0(VM؇쁔scEn1 x ՂE64p Ak!N8 /upNk}6. L_2X|C B×% aȫ#6b)c :+ ƅQsۮϪUo]KTbLX4ـlk56RRh+I8jG ʗW +2e, P)il8`kQJMLxedLhHp:!$)i, xAA *]ӴM*Xf0If ɤdr2zײ veDŽ#7Aw&U?1d N[N@C9v8_i,b8f 0 bǤ@A. Kl0F|Q$%;cX@ʹS0y3X^",a6u6hoI S1D\ ̑&[-߬#h=Hl oPS_!HuPW"h$tsj\ D}j܇^"Tm__} R$Ed8$s[@ eG2@IL~hmňؤZ) )D:37qraQK&;fg$'7cD-D!LȈ{al|v&^^_3Z m&"?]@Z$L>u`i0sK7` uܕ,e_#(=@2c'$g5/FG Z 4I D&rzd^+2ȇ|ַ.O#h^{K*@KU[@Yح1Hۑ ^* UgeS Yߚ}.ɶ _EܱiFܶAM.k$icm1vng%Wފ<)mԮKU("#qF$4V=b\U@@"Q(9@wAKbiѸ#lsEe` f\An%Rh=vuRQ :(te %aZv̳rF.H}4pZzћ/$LDMH.n=vEpYAdF'#f(Are j N7 5By4+ KR7@>V5UoA [ 0Z]~-8ci!ud1mpb"Tw PfRΐ6R;xh]42{Yn%T27~撣%03 6`4P­<h9n9ۋǴ{/)憡L ;: :G qu֕@@3^#܆up0_`uh ]TFŬ㪹aX񬵤LK8/KGhk6#E YiFf4@!(!A/[ñ\zH FF6 9CDG7kP"IipwLt*z+޵cٿ"a~vӽ `/=i4X$+Ƒ$;dKʖbt1#<@z;@)7X Y~{=L5X!X_ЉRpyr ٚ)hNCA^g#Dyje LjjӢ9RYiȣ0Q5@{=[I~ GȄ҆"&Y)F-fjRH 9^{=P>s|tw2e<(b dPC\m O@ײoFr;/7] 8@*$k o֐i~ ,%fthYf WB?ASqCwX#9>d85n7Z6{ِB%pUݨvy b:BX3 4+AMjQAFRȰ Œť$襁klt^gQ8"!B S P{ c aWKp!fzNMT)WГJ,bj ہBݒ$Yc]WȢR[:{4=mGS%?8׵RPeJEZ*'mvO(4?8 E1yZhG!takToX%+|HMФ(OSK h/g?1E1!Z͆ J+(9ŒF 'MI[c1tp, >Hn^tt}9tK.sG Qv]]YVBLo!ZaNWR#+'-%-BxT)th9|2(9%HW^_ Bu-f#_(%]ޜ2ŝ;IC{~~pŁU?T;i+CtتJ]7BZ !:F ~,gBBBWVJj!]I ++IqC+Dҕء>%z JkRͮ%Ьs39.إ6O9ƪ\WHS)D72dc.Gx.RZ{ ?cJjV~tQv?F'QռBfs?1#+c%ܴBDk Q:Ctute%A+IC+D)@V[Q]!`[]$R :>jD)9,**gV ]!Z;xBڿ;V=eV }-B~eWU?RWPѕAWUϭ} O =~h :]!ʡ[&zi +l|1tp/WC+@ic+ Fb *Q ]!ZNW8#+Uwrg\+LgKUK;ZdڂIb%Uݝf9W%tJ.֪2-&EZe J#)9ŒF;YIb :W ]TOW{#+} qzUx@kuzc/N#+ t{\BW~6PJEyg+5B֔BWֱ/FB]}3tvzwʘ``/\zC/~( AWUϙX)S ]!\J+D Pj戮sʰ kŋ+kd)th ՋЕ䒩R Z·NW[c+UIVs`q9k#F}.D"Z/JGGjI)i˃o,fj3tBV]!]*I]`[µ +}IXrUBfr#+'9%+̅-*fe =PzEtutf5rAjGn ]}3twzʤ/ LWۃ^ph{vS6R,|DWz֢ Fb *^ ]!ZˆNW #+f Bup+C+D$]qW]!`g+(C+DX9gA+XG $-⌬$%+d7+zMcށҧfhGreCKڛTAmr۪ӛۺ:6Df![,*kiF]PVN$Y:γZ < vbyҠ0DIGi;W,*Unt(':B2Sv]!r r#JKrHW SҤLCW{C+ Q*Zyt匕$gNk]1 кC 7[pd/֖ \Y)th3+Ugfr]hOFWa6ǰ/['MgW4?ĝLU.GQw…t!V&X)} wXgeSxKJci2^_~?ZjɃ={Xv ] ZV3|}Xvi[b4&D1%3Zl3r Eud?)^Ԇ%˘Q+0NF MdW'xKpNKh3l];t}".OXЮhσ6/no޼Z@Oǩe^wwy; =f&gk_^*$_OSx?Oې\|?q7 i% }y/^~/W-=Rַyб0+~?mg[Zu߬xq1}*GXRx~gn ?}-(U)~ֱ&WY}D{>mJlu*iAhsy˾T@x3_>L4$knSv<-_Y5]~ۍkX뛴J b-ƐlBX>jϻ.MWc(JZ)/F7гB ےJmәd-ٯou\%ܞ ~OTR}KǬMeQ;ioCSm r^T (Z51JyO*ehMõjdr3ntl'ꬭ (.7}ҽ.f-z{jq{ݗԯލ@uw_v/-%ófx_b g{j%΁&\HA@rĐ J11gՎY‡Z\`=0 a ^[ lnX wTNxRhhRۚkS (L {èV!1o6 Joכ*ev[f;/[vDmz 5R :fߺ"Q&[jQ\EM-XXB|lIp ( LgDc+|kc!Ydb<舫,pIN^qVT1Szo?[%I8z/S& oH"Hl4<'Q<kWg$@@Юև@}XK-mvWbkz}gvٻ6ndWSMUyp$Un*vjv]*\ɈTWi /$(Ȣ/"9 Ѓnna{Y^ċ.4Q|\?4B7l]nXlaIua=k9WOgkO8os|;T>9zW2._iWSj|J27yt~FT=?d:+N=9Ee[ f1 8oVsmrR@͛ 5o!jSEq-8gqE9po\''W=oz^[V||<%kfYʺug7>yOӷU͜}{~ o?d]8y='_r2i׽~(Wr>hD_̏=hNh=}?LKƹqSWW0.tr2_l<ۗ%wd{ڛ^Ǘ^Lz׋&l<(JJ%?5Dʯ_ч_WD?˲KΎ7G땧9:; aeϾ?y֌FN;_u->ُ.h>ZnvG3(H:XuiAm\\ k fWxT|#!mj>]ElxᜓKRPgi{=I6IcG212N9h\uBDwj66w zi+mQd+>j ToǼv1O㬵3r=!> 
hߍ)QXisҍ6D}]d|xNJ0އRzD$FG,ht []*roe!BJt4ƹ H@E"Ykc!'6Lm5rnIC74gV׶lORc0O~&,m3141I0J}{Ri9Ifmv{=zJZxap͌9SB M3n G߄rɌ9n?rR2rܓGDD< M~y;f>ah#+җ]Xr$ re9HX2I[E.*UA=%Y,I04q u٨ !992FΎz|~0~_B+F@ZZ[lW\|7`?3C\O|Vg|W׋ Ze; -fb. I*, o 8 #_uYP o˻,ۜ1J dc9xց IzR 0V;+-C!άC+%a5O-.#H, 3AG.AIӂ GVю yOa얦4"HR;P*)1 +b8cè4f(! E$iûu ,h8s2yCt`Jk=fC wyRR{-H(cktFaig }YEn  6[F+eO-N{m[3#|>4*0_]UܭMŕv+a ȓ nɲa?)[Yp-g\2q4fGox'< ˣɔ\3m5C[aȜd1cq=kIn Ǔj ᴐϹ~FD߷G eoWD3ݼV?~ԐtJTa]р1oLI;/h8%oĨ 0ozh޺7k^NEm]/B&Ƃb3gjfW6ц?_Lۺ/Np9~mI8#mFlfvUVy%{8ju`ŲDO[nn^damuqFtZXqur )+bGt]GW#gohS'u'~u.b|J?:y^_=?׏8y ŷ/N^V L12HP$ }?>64ZZahkj|w隷a5- oz?_|Ob\xj$T/FWQKSTΆ^wRMȸbxZbɂ ]!۷ d'eVB'(JdsJ!rH^z"XM@ >u@c8,t^ӊہ"y!Hǖ,C "ORH(R+\0QZ,A(DxwS`b^1H$;f ҲJ {sS华7n\:˧qt_|}RڕyWf V \5VXXI@EEt?Clj5@ILe3D?>ik% Ȫڐ]%?xRhkk Y2jº^=_LOvIS?(=4MMOV-8rh\!˧A4Ց%,R$LL sAhɐ]$ZE(}(jOZ:JK&.(K^r$)$+ x.'h##5@ &1c^iX(%68Ɯ l6H/6sXt!s*gƴ3nLrO&ڥrC(XȳY$ 2sIFA#TvJxCM $NrIUV' v_lVm9 V348:3L"hQ,r,dy*gV]iD812r &m hcğH) u@ɼ9;AoaRh@<`CH0R,2&:nme @ =[aN)͹"sE~bWfΝɬ#  "=KPc,A&A UԊA:t!P0UTD2 hP*Ar,2Hz$&$)mXR(l8(rìf`e&BRi.K!""%!+)-&X>!G2ʴE-wD_jЃUc}OWE]2+J&%O,*z&@%%֨tN"F݉G=XcM/E:qȾa*K7A/&ea~l^vGԤi,:~ !QO 9S?7 7bc%5@l~5dHh`tq6&g@@.@K$qN&eT̀B'-1I @v ] F#n3]Zmch/xU$+t$g_O3MYI38yS72elXX,2ɒ XݡVEK} Qf͸`:XAy>$INX}*Zq#j 6`CLdːq䂬L-Q"; 8Uu#`ݪ읳^˨ӱ.c ozPب,К:(dG z(x. nC%4$E$ UB/"' RIkJfceAFGoK5R?uZO:Xn<`fy.8Ix|qqwi[cpQ']\qnl& o_TjMtL!̢6K+O<i "1hlJR|֔`UDV]c974ײ2Ϥjkjܭ*LjqƾPUօӅ {^xb1{rOAA25C@( hrDO͜$[̴wVp;F&Yz( MC{> &CR)dK&c28$#Z1ƮF;ib֮jm]YkNkwM8Gv Gr2Ò 2o H'0UYf혆OɑϢҦU!eȁPthH$$KJQሥ!K.]a 'E1FjDSY#N#vqkVpBm)PTRG'd)$B lC4MigiQ.kÉgB jH1`HX9D"vQjܭߟ"=@zq̅:qɾzV֋Ӌ^ܺO\KGZq QFʌ]ioDz+}Je6GgTg:)x a{ؽrf*…v6xKvHk>>lM͡^dwg5[j,d߶͞)!OfAwhjvW9`E&MC`RGSJmΌy:s1O kOָƸYic5`S]O (/^pp"Mz|oU<Æ Z/@6ri`k U cJ#68R$#Y@SJx́ @Uv)M[FmY&c*'OcːM{ >t}6d)|Qv{z%y b4柆N.Pl'cG׿-ٻN]WO$r"554rY~UrwÆwUەUSsT뾲m־ӔNVzԙ'8$ >@-Ѧ道Wpf&BWtui;r_aM\Nj7ð_b;i|Zi+:z&%dXdQ,} )xYBG̉3Ͽ|¯N..$='в<'[^vWM:y-*-fE %u\*~^(lRyyuʮj<3F=/~~K6<Ԣ,ePnZ ޳ fc3lY^1Ag8)Qnan_6`z s`(XX?p2ZXbWĮ_wR ,¥g؁ f(U%u0| \(Ax:5}f!ʍ"O֑aYqGkZ yc1BuȸMZ|3s0^nt'A_}vws/V'KMs›xV"Eg4~:+ƙ{1U #f\`fseuP@z\Z|M&>Ф!F zTC!EP$ @*GD0(.D&fOJ78`1wځZDl ha<aFrM;7%z*X74TL֞;n>U 0e1%oˏ6u< 5CĴ|u?| M| qdMl)j"' >YB!Ԗ4@i* R c:JyIQ 6VYZkiS6o:4ݻ ,LLTk.U7W:rhd1SWjt b?[ s(]?'WWtZf+{JPAy^ͳ+/ 6Vr|+%gmdopny؛pwZHԬ_% cϗ٥yWHKXyalsBMn4w/\"ϐv`4rၱkT V0to@r5`B-؁5f($ef"(#w$fOx=k~hV*clK\W-|Խ^=׬U-;;Jb"@`-DNw$>KD|1T!,<JR},pWIJW 3toƝnwaFmF;K!8?PB̀YyT<0ʽ]0y_kWORN`L,Sϥee`^U̷ |ygϸ`?ϴڏ?:AE<8|uVCK1 菂MZ4 F݁8qIlb2g&,$+͛qd8jX#1E˵D7Q3q}TR,yZK(ΒNshr4,E]*`xGcQsvi[MHŇ O/< |ϞűVBb6ik]ko+2ŗcuupo GQxh$;-vfii$Qn;fW!2R  [jʹg3:+wx~b1^y1@.H$ٯqt^VΩo˪CV>6Uwk@©]Z\+J&HS*m%\iѹmDYf]{Z tn;om{q nf[慾kf^V`B%<ԐUCJ[R h}>Ԃ۞T0v[1$TOzB -՘\:ѩߏ5iɭf5ۧ ]uSu%SsNpcڃ~r=*owW.jOmߦo|!e&h;\5Fݔo E'&}I˲#;;\6qk٩;?\QcC|z?\f.w4DdQmbV),yRG(A)cXD.2yTYHUugs4mQcXx>캾c @9f i˞Eˌr$\WEm2*y˘WFIUY|,䞫_=j'vޓ]\,IF6> +½_pn+D<˜`lΘ`#]8Zg4IƜTHXh,Ѐ LFf>1Ƙ n.I"ZqY:\ +]v)b/޷VfcJjv)~4 wx: Mmc1~᥎u7ll) !?5oBVdC(l q|hDp^Wtr<%=G03ѭ:v=`җkX2,$ re9HY2I[;ZQ*+1;% &d.B@uB!NKP%WΎVr7y "ǔLclRqKŶw&X|tsײѢ?'jr(1Lłr۠t9Ie2VAroT#HB:},#/,؀$`Qd`0Zu 5N7Խmؕq(aRVʺH1$rʑ1t"YI1- ='v2MG_\hiEvs Sb@4Wq*Qi̖R#͉dS>9mhG*+]كgN&}#*SZ5:FYeBGm?gJx._H1bak՞,{.V|{.Otz#+eO&m3"%.X}Ge2Mv)+g O2x;B'آhn)&8㒉vph\X08X2g--Q{2p<2L˗pNS<竬<-pR4DhNOf5Sd!4TMVxNW# x2KvשsKF7{eۤj $C=2A\&"DNɩP6 lfnfXX]}d5S, z,i3XÒ"5a6&ߦIn9ݫV}wORƾm^rwO6ü ;:/D4g+Zb݋N[h)!@*"muSKY]ny$2{o3MŔ>?^o]:/Ǥ-O~ ?}]BKg446y^IMT5Yʗۂg/:0=0ZHBɊZ k4u ۙnWץk G6,3_6n5p:_v|Q˫2|-/ɫ_~t9;I@^ݙ^ {~0!i\"i9m?zYw vq<%k󻏚n[r^V* V7!7k !H 8(XO V}@ =j0YR2M/$$Ι+2Ypk%F4{A paje{?X00rٸ/ĴuR#1S!ou|FWz/7'C8'aM6\5nʡjh͈(?΃7ANxEZoN ?/`tW0/WiP0KnCBOظR"s [&#gnܦ.v<,k)h΁f#ZM!HО2e'P 椕N`H&2\6Ր+=_glHa”Lrv`K@>Yh0iŚ m:킃LҌ)?ǔ{JA kJ4IE QY0A> 
64DRdU}o{FuDZ|kf<})%ݒI %2EDmd,{n` &jk ǘ/N9hR hc LPk#96D6!bYC),VqAeqZ~{x2p5ϊQU#2jgxJ%uP)Ut@0ԛeH V1hf3Mֳ`u9sMIsI3$YF"W)kbJV+ǪzJ# I[0AhDEc48 Dc(wUΎzkrf\#ty_2>q$QK`6qDz sŭ0ZyT*k~a~dWfΝɬGEz" XLFU jųȓFt%ԾSQpL"AGE Je@#~b+Qeثqd+I7 .f3:*FD(]BDDfKEV&S&[M$}B"ʴE?;"4pzы}Qj"rWӕoYe`ERDZ c-5j5'LrգzXO.E;I]T-n/a~㵛ny)c)œV1LNYZvo#q˥*XOSGiHnќ8yS72elXX,2ɒ JXݾVi, +D5. `e5B@+W̢w&9aeh6 `DU&l1 !EL-Q"; 8UƺՆ#av#ۋU2Gz:>=&|)%SH8BhЃG#pATpb%f1?& ."- W|Ћi@@JfceE6TKݍ`)Q0Ӌ:X&'w:XD%?XdžQ8'-W^0/.N2yV<ꤋ3gjȑ_a]jr$lӭ˅W+Ԓeo{8$E)Rے1< 47KʋyѴkט_r]!&]"^j&)R>iύ#2:)vR0df=V0s$@uBbpE*$ tJܪ$,e6!H5*943B0 3rƏ׬Nn dF_G /O <Gl tBC$!180*Hӂ0htT&ѪX_C6lœ6T^r'yP`Jh27a#tÈݘ: a<nwڲaԖ-j v"A^"Z)b5AĒ4Dr!'6k'!N BFfH9ZўEX"2)D ){Qm]1uaiOwJ30 "6?ED0"[DAzFh2UR.O!e)A W`$f#M mg$(*/ keTGKJi2FَOw!.jpZgcd_\ nqōk؈Rs#e1Vcdh|"Zoqx6{?M}4{|9҈:OS䉳xC;8ޏHFc2?N s?!@?pU~5 ͎~yL*xA KaO?g/a`Pӻ gƏh7?ؙyx\NB`i]{cq8Sa8?NC}Ψ_Q+FxSPPd 8w+ TȣU4gA: ﯴ: ;BQ !@p2-QEA4I_ZPy?׫򳠬Sw|~hGw9< vc1H +5Tg çS-w>]:̟5[-]q@c ޫӂ) 2-x'2\C ^C ^E ZZws&ֈvxf% pB3;)56sMMtT&ABH(AasEBL:mII#m#=DGxyӞ-e4Et5 .cs%76X,l> YtzGP )s]+>P`KϥIuG[l>go% ,9 qMy+ce %7`A4:w4<+[fw!)#zCYלVZt̼5ٛƾX.sBl7JԂrkl O/ O\r׸m>m2zB!+X{@#YX Ή(\Զp ET:} o/f5k>^ Ƅ  0X,SJ $I8B8 " u6{LRO  41;cD }21HUk:ISlOWFJ6Z'BW|64{oD p>`-y2:9ؑ2R\Z|k$8C|-4Hec` I (= DhQK^E)D kF.BD99HK#J/Ԝf0hX.8!"N,YEfc ǂ|xcbTmbAmy[jD5om"51ā闯tN buf/ɦҴu5hӣ/_Nk^ʾo;`+ 8S"Zjkh4N0mp9/D|R9 /Wݸ^&s@-(Ԓ(OzHD(PU(dhYj0TO`\I?b nJ#kU8e$<SGm6|'0"Âq ВZi!BGDj\ Nʋ TTs/[ؑmt^+Zzׯ?Ho WXfgIs,HNiU`h C@hQب_HtVr.S1+h8d2M-ۮ4HJ6ėZ8g~XQ0$&Dϓ_K+cTiIj;6[qm)>Z<_$H5™` M:Aco~h>pM,>y8PL7Mk5"iR*J.0fk\ 6["BkMhC~`HUgRkK${ylKʙ؁ƃI"H&?t-:+&NmwrEU#ʨMUVRafwSi]mjd3>=PJ.} 'c-^Z|ҨA|ƻ|PU|!:~ 0g: v/k)~ ]'hT,HS .u )3ɼZmF$jTJU7:vx:;N\ͽqv lz`dHk_nM&dZHfȆ!p@թy:$=8^zIP)U(:]tݒgt9\ixv?Wh Y*4S )f°LmV3MI,=u|^S/;.,Y&7}oNC\oxQM~CVb ॊ\*Dp&1'RGe,\t}33ooͳ9In;~sMK"]k 9'K[AѺ:(& %F@/@nQ?>~KK dŃhN)}z(d^ ³Ǫ b(דAInTC4A8K*~WꡁSYqT8"d.Fl)6YL#1fc4E^ %fB&$ZͺV|Ai%~#)_a~T%&sc^0n=L>ZJH#q9c_IPC&%":\ -\R%B fl LT 5kY]e&.JG}t7Uc!ls ΃H7"R&)QIPAj5xf<R; q/3j(CaNH Mfɍc$Y??<׽(L֤lI>֐M(om=c_] #ڀ:u7A'_60QmIEhoGHZi"gȩ9U]8reRa(1dm\E$ 0i :d"6ǐ/~E. 7rպ/H3$p$&(!@AУұEP`C t-pjB՞4y],bcuOmۨ*>,ڬ䒄p&2hFXGmt4I$?!4}д!i滔ʔ*`wh<%cu.+ބ쭓CilHYg)!XcX 1YK^c6 $6ʘ-Wu6Ӹ?3j΅?)x*#Gl|N|N(9wt: /,fj@=8$u沾㯓.#Sa 7=9~ݠulVKÁDuzJ()&ZKlF:_n12pHx ^\7_~&x/|I_&}5:ZF:ꞿq]bxd_._-E/^\xcmmR;mN n( v\c'NJC!tz-Oӌe~u۳5 R|'hp>_앉"Vɏgd5G4y BlJ`_æ˨͗be6Lixg!O'Gˁ]^sa|zYi7^UۯMWtVY]̠c>9=IfʨΈ?r4vbz}ۗZ։_6o_}ͷoï~RWr)&yuz[^uigs" f=];+Ho1lWQ~zO@yWMR]a=/'}sy߅뵡~Yy_=u9+<c~G0=xG)kzh>wIx\ihaRK)]YY u>N]gv__w#g,ѣ"~„+rЩũs籍|WDo~_,ܻjyt4ӳl|_F~a4oy6/ɭ|$UK#'ӏ㚆t^6gΎj~}p&qТ?Nh)8i:vuQVB5%7PGhmӀ0э: # TZИ@90h1S` |H (&Y$0"'MiSCilVxKRѮ'>maY+M3=c`t +%xIR Q{C ds)`n=ɞk/64Vaf\η0!-/X%?BiƊLEɼCa@iCmhZ]6ɤlb2 eHr)BdFhhiAJ'GoӶ+yZ7BN]8;=^>g9ywx)m4d!%, c U6ғ\I,V4%b ޒzd f>3`RI:TبJ7n ^;>NO˦ձcmLr=-b`O,̒o!87ܛ2Xd:t&\bl:ws¨)Wѱd/nRВ % % ]*K6: 2(QҶE,: )ejIk0z!bKEuZZCf ݥ|l˸Ɲx@k;}ܰY ԆC$N#1O YlKɒMɑSxIybMqf\f _mvTEԬ8ʪR3"IcAOJrFk$IYਫҊQdE&ࢲN+$l,`@HZY3q_3EûI*!deCIV+@\1K/ 0I 9x &>~)p['HhVGv1 I"zb,UQnB  zW PJGOxҍR6$%uNN  GP0 (MI(Gst1kq4q;6N|!m6Cx0;!graԾh#9*WP&OiA I0pFe Gi=k"5>laQeM "KD@Fk dU}N"%;G;X.M/v:i}55Nj`Myґ)]J8KhLHM&D }Iwde;> HТA;Ř@C($RG=2 C!Î2B^g sR6,T쫊+^O =2 Aq_pz=zZ}>r0; .bSL͝PRr)״|O1D 9BtPM9"Jx5N6$ m$U CR8w u-Y/ ,w7y뿤N՝N]t2[8uҳx]*-~.b)ui2^upNT\l ΅:@D/:LIh@XVwJ;^'tB%i#&XH&sWiR.{"M` \ۺ]G)L1DQ9 ufjlM51m&ΖVCw;Vs?!=. 
0FEV'bcr b~$xf@8FiO.AFֿ2$ HF#{sScCm&Kj3a)S8ZXm'ZXJ 3<L8ٵx|yq5wӾƥN>\dtT eW"ĦLR,"YXU>toTR6cFh.Z|j1Bn䢥b Ge,lյBa'36nK2i/4/|T_YEffٻdfh/~'thz~4~ۓ &cg 0LځO t\ֺqfZ < 셬QiUݦ.YS>N 3en$bⰱn&;N' L;mcm= '<2upPN@bV>Og4E TD m}1օH`PɱHJd87B̥f2&ZK7nؓJiu}=kx+!C29hHX-FJ*XK`LN)߶G)uܘ hu`~&ȒNLZccꮾ=b3qď:_u ڥfZr_"~q2 9 s"NZ;4Y%(""*4#0;J;}cxϖ/V{Knq1;}ܪ/s˝H(@G[>ZϖU\e*ֻs̰W3HxiNgܵh&9N\ţ&lx>cXtBz;˗~[P`[I|3Erpd _kno?|rG vo'kݗYYr_a4LM]U*.}qWH!v]U)W讬C $⪽jJ 8]9&S퍻ސAv]U)0]}= 7_][oG+D^6Y`_ u6 ${,a#}S$CRS=MĖim q+[]\^.zĕJG?zw~ o)1wtʯߗ07UVG.jF?^`-tXcbm./Eئ޲dž<_VQIp>>gLi3}|޹>>L3}|>>g'eL3}|ϻ-gL3}|9 .&Zi2}|f>>gL3}|>>gctGݕ^ ,>>+>>gL3}|>>gJL3}|>>gLv>>g3}|Y.fL3}|>]Gz8G my?G 1`8S'G#fB`f e,tPQ G-czgb'HI"WA8XťDnf 8L{UQl=A jS8% 3Bs4 v]W3֘ma<+9oS:Rb&0~0#eS%*RG 'Sy) a0!wV;gPdajg SdX`B^1HyI(X̉Q J9֚{mrXNa9!DDO93!Ruu ^u{6y'7vm21[14 m:I/ńuJP*]LKo?RtgJP޽MyW;ԼPrz ꫟n%ceȼ=ʷTM2XzweΖf=iRmL\\J:=!~+Rs˺ts$:=C:Lʼn]Nːފ1$?C2}C2 {zIB3>F\ER; p`X,Q` e +gj44cdxt+ܳG־9 ;)k4:44;˰EJIĥ)<ܥ$&1F: lp6HF;g)Oc.*5X@Umug[Jn KeV \)P1Wl P>:5Q[{KmcVN)I{h\<8 3 PH8 S> N 4-wHawJ:UDSK͹3f[8r礻8Jǯ⢱Tup^IX/{^ؘfL>yĀ0e6GgsFT 3dX fcra;3a,\ț[sڻtM44?Z,r_0خ,%-g i `J7k0ŠϴS2:L/^׆ &uDhQhcdVn4;j:k>L u]<ߵ i[WAuY:qSbٗ1g^/כЙ_-¡󥌷FMw&P.p"PƠ_t:˿ \JAU?W?}7 A~٭ P$=#% ˻>._{%HDH`ٻIZa`^¨`>i#Gr+la%Y3zDzcۏJt \z52p.TaFxԱI, (A86"F9OlqTQܦK=Ew9lVJHH!mZvԪ:'[ԞpqVp' clD }N}wST1{>8WDyuZP4lY:TN7| VuP>/F,B0*㑅H>HԔ1тFQ4`hʘ-tM~U U*]kVkirKA~F:hc{L,c05% +&9NxQu^|+qnY9 #ߓ>Bg%;Bl#(JIFS8 .'68A,_k%zAw[sU=`w%9KeYy"V)B+AjBI;OVC{ҏ׵=5T,OK@'N,-4nha>3!ZU[B:_ .tWCzKع^RIpB\X;ue0*j6;? L 4~/mTCghJQROU},zrr;\EJ^] A qi]P. wpVFc4 WU;ɼ?f_O~,T[gk:/[ۏtf삁ZI]gS B -qF[b|}KM͐f4jlfq|mCP(i~mwapV j=ȦV*"󱎓?;`Rwc^,wS+++tSĿur˯?}|߾}[8u30 .p7ugwgu@gtQxM֚ꦩb;4d&|vhs > +Thnvҫw_߻cj֣ğwJ;6l۩MwqU+qU\Q -GC1~ bCazu--v=D(:ջWjr~Kvyr4,'IA)3B68# u]8и Fs056WzzQUb7'46\ D!}>MDI`dk\*ꈑ4vh(Xj#VGjk%NR.UĶ[gu<ɄnhW6eFNQ*N|&RQQWP~,AF 2.k 2P`Ch*ձUV¡UR^!\--Yv9콉72auUQRcx4;bqZ| oI5fNÔDgzRN(tP!CM>ll쾞/tM$.rgBMY13,.f#f.vnٻcF8S(Df# JI"i4(0u4 +F5Zi4Ge>dq$ʃ?/Rj++0 'k @^,\BR6> ] \ei9tRr++5Jh*ˎx(Kpvp^#\ \C GWY\.JEW/ĖC/N~~zTg$-7umIQ<DW=5s8"q4p>'ä*K \F8&v # \eq>8\ *K):zp~yz~*M*~ϙΫ]YB]_>omn{㾟\W!-x ܔ:0!z_$E"',)ۨ)ѫGLHfd]@.XZ vq]@g [$TO0p7Z@m96,t{ 7r#@WRo%2~ྡྷ{<K[Vna`\js[O! 9i7>n8R`<$O1\s)"M.:1#ɑOAF$֒.q[Gh;Ti(m?}fX,J( s̷Z 5 _MacusJINv +!^2[?h0ק}^||~_BYKhˀbW&BW-5 4\z7$R a?1F 0<0DMZRK@= u$)OU(dY*0P%PBD 9,P pqr&2\w*GA9KVPYК8[j !~t}i4gL`tk؁ol.&fiQWQÛ oq<8yD:"px12¦0>19|y܅guӋБF!BG%)ԂVDqąAF%q'o˨-\uttA+VgF_OeX`yz%1)h:: u @;խV H/2hX6C&C RԶfw /-6@)\^0\&_y9P\͍xSjFR$- lӀ**D9 VBCJ4||0nƯ4R. D>m6ʏoʹ8gˏ! 
=)]0vPj(ئ٨ח 7ۗc?g#MC\ܖ}U,=۬>T9/~XӤ"A鼢ղC>>aYMCoPk/G7vT<(]7p>LG'{siV?n* ST\SmϛJ0l]K{F4O[PӶ*кA\y㞝|7,G}T)3Z~?)RdhSF%hp\,|z=zxu+r*F) Qg;w.-!;5F~wNc;e4J) 2:>BEr\C'_'xE){'o =P+$҅/_N)st6 k{o<~=;a[䤚&zRB_WXa˂h|;Ι秛7~!Zb KT>Ap&qR/Px<#qE]߷s>90!Z:<N1gk>8]҄U)'䊜+9N& L'q`짖$Zs (G-npHtHqk.ng tһRq;l@Of9l %u/a~?.Ģ7oNW>̡2rKXaI`'r ǿq-$ϖP{K-U5 >'7NWP,MJT_]t|8{?=Ͽt3ٻ;[u=0>k"(~wwn_jjoV5bjuoQm65ga mwrqCיYyQLԊte^}e~YeOz.bq  b7Вaޔ WYғ^owzzaIդ$Sg,G0ps)w<rSp 3+C!DQZb,n ^H*^:ݬ;!;̦w1ufz6o{xHߝ]?]" !&֠-Y FAт-[dejISyIyOٙɻJZjg*Ql:5x#zz튯E{-3k!DOzdj#@-6؈d-1% e )` G%Ol Mg?D6"I\Y {@W{zW,au_)(JEGt,\’\Č1l)(o9ެ @0hn3NNI7 ZaH]l f"rRŔmQt\(EL,elg69KZ (([f6 9[B Y tc=k6Blvjx"AR !)%V&(AYI$(cRI"ToqJT ||)?7gH$y0ܨ2T#,Gm೒b~$xg)NնYMFW){%O(X&.&kƊl:QяZ-G>԰G$ct4be7_zKjV _V[ ty˴L03ӛd& oF,fxzvNy=x9GhnQZb.7_Oq``..YdSD/B|P(;`9+GE|=B>yY ck:5IPX)lhLI12h]VZMc'&6LnD% Q,*?ʢ̂[?j5x{ RБOw^ &,un}qfwo3{]F6;9>O CR5>Qd|2zߛ軋w[jwUDJH[:AO,)wCi-KqYR:SѤj(%0ʂe*BI(OZ+|ImkKj I"OaAi ^UzY1]";Lr~Hn50򰠧}Hߟ"ṛs>&sjݵ@hY5Obo2xtut7=']cj'\D2~]ٙ?g^[j7[%?(zOfwyU(/Z{KiyJn@ҟ%8gn7&na4otB"+K@d9OӅL d@QOQOQOQ##MH Z]"9B"4lkI1E+tmbo:F2xB"'\ˢCU 1>:E016e9cj:E*B zr ظц<6q\@v]ymknc낻L?07<:D/\0ƀU!1h"`(:emwNIwf~5=iRnuӵ~sc9oXvӹwK̔~nzیwy-v~uEtZЁw9JW/)*;OQӌg>;04=n.{5(HOc{yϏ@VVu; mi[Y{?j~,ohR"6>Qm_4=˗)=B}9ףO"V?`wۭݛ_R_;7yW3rYVak X<񷋋qJ#KZy8?o{֊>wj́H˼VZH>Y3;]Uprz:ٹ%~U{;!߿OR[?\?=hJG/W=Ena5 ןZ]͒VA^3Cyf[ME_KӬw~pz;W~%F?fv ~Y|s 53zm;+|"/廅cqο?z֊ H 1sTC]v|lay - 6jtro:Z UR.N]ōE7^WE-XH;juer _LhocTz;u+Br~nϥGͮf?(.&x=7 kX)JlHf{a{#eA8Nj~/=hyi^s3!mUY*ֵsHH:y# "d4Â%F5!~ot}Gy[ǀQ vk י˔_L L&>bҪh$+x{'ibKbb?߃ǍZnmō%휆/xe}REJ1&4bBRd4:02ق3Y5Rcd޵-q\_KrEUze=خ$:ʌ(IYRzp!D:8{_wDF#a*\qŒndL/TI%V)ִvpf啸:%oY2-}Q+k7Hm&!栙TB›7&1V !,lBT`hD!lMڠN ̕9ƽ^DlwVN{0WOC*.9B"idE^%} ڸOHk@>mv(ysEU▕X9P՚#ϱRϰ6;VA ODK}-wS乮nx;MTSp1ZD 0HM,l֣z=Ąb!$`Y* |!>]Rrabٳ xIb)IX C%Xe0'!Hdh5"fB*,t&,g 6ڞChmStn{d`^F DcۄuZ)Vو0v( :^-5 X q9xY#LIb(M47sH2Xʣ*ڈʤ7)n8@I_pL[i}vo޽IX\I;;zBނNs] wTB\1 !MBPڳ٠ 2h\NFatNNeHK;&PՀwbF ɸMAC[)b"fG̤5!vAd I(oL+]".ƥ 2I_i9 bkVJ hUv m*5c@(qHseETY$o/Ey@_Q\*!B꡻2Z润.-%u$$EX 025ID^,.zjEQЯ*+9JO[!㠅bJa`"@So eXxኵ&kE\uXgLӄ@}3V; Rڵ#eێYUI1 E%m.,-‰s4B u@M0- |g`ke>_a۫3( ~\G QȷZ4]q`<" ƇK/(yi{`Vi>m9"sە Nz- ] &W$Zʭ􀁩p1-3(v5nV[4_jRaBG1 A1J#HĄ&5-+T^ehT. U U:]Xeai@)h^\ ,Edu}cЭLM( 2`G[m¢"Yԏ/XA(Ӭ(adNI/+II|'ӷ +j߫kc<}-d*,hb⮆,k[0 w@܆5pi/&s7Db6y җU@QDhhʀvo!Jp Ў2vp >@tЫP%l@ۄU 9'Ō:hX,pZnkT=^pthZi29m]¢ΤI`&Ybm4 ?x2Z.mxFw! 
:"eJay7) s.CpYT24cC;|*i5kN_rNY4LEרH RhWT*[fy/ztv1Fa-B-I!.y͸t*2c%V &nҘ;kճNgG}8iώf'esMLn|,Ե+Z`8FL-FT`mS\;5,U$ڳ<54灣d`ѠЛq=(Ϗ4#}Ia&IU.2R ;:G4 T"* wB)WnV뛶fXllFVU.rI"WŪR׾yebBu"JI-bJ:"ԅKN]"J#؄v^Q銄Sq1yK<BRggqlvAQ3*!ң^"_+4mx\9f7~PGz4IW*>zB6 g?*]I014"U1/ivڿ@z>tq>>HnpӋeF t=c?ߣرhG|_Gnq9p4o&=lNo%wUw0 V^y{K DdD?u9Ќn`]Ƹ=xk=Ckcۨk575JV= g'gD;̵>X#j= hu+FsNGjwHpz6?(Dx|l5OX+rV}rN&vrlv1O\{Hq0ޘ{>0|'ۡ}`}fY"p:26.٫rvmeVpYz{ϻYlm_*?X2HfXlen\/Cơ];_ N&ހчO`zgumvy ƿ9ހsf nOp-5rނ~Kͷ /m$NW F\_PFnp*ZzX]H埐 `1sWuCt@;xƄ˶G);u H&acX-aF'/%̿~rS a>W?F_kךC?ߥ|ч2ڮTwעhznt6F _\lp◃;ְv@emRp}yRa=lߜ4o=}ޘ/~˔Nߘ4F ,1<:VZe7|^uo¤ 7s,bh8 ;ߦb.> (s&Bx}*Gx(&}O0(7J?67?_[k 3a߲yl]r> ߀W>i"؛;@z6oiw&PRqb}L A@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 DL b@"&1 0yފ7vԴz8^_6/_NŹ..Nw hKȼI-濶3A&,]Xl~Cf/ӥt7WOuQ~D?p9[=CͫQ#MߎPZQ)W.uw:z;%tߵEA싃xvmvp@ Y`MrvqOt޽/ vEǞwϒλk 9xvpo, vYP<43%CFwZRa?^ ?e"Wi7W>e:~ qwTp2tޜ>2sf aӽe7\C*/;[!8{8i?קnFە`*֖vkmT^~\f qF6ߺpNبy'ʢb p;ײJW(=Ayr)$Vһlv1O ^{ȫq=i]Z*츬ք^/NNGj;o;I ~m\d|X;?2|ٻS+7,y )Mp:lWBh8j"(W[nޫ=*qcFjcdZT;j=:Dn*q[NV67/}k˶W֣ >7&L9 W-kF&#˶MGNFh,Hl+P\]|s9 ^1鐶9fzovٷ#nXQTe?{$8s/zO >6ndZraV7MHG]ƬpHa9W ϖhFwtyqN ^}c][>v*{sEt?RtI{-I/x^MOG0bW"Qx/+KF{o rFt㹁_mK C>Ub㊷bþߣw+յXe)ʟrH9 ʐ~ 4ma/@q]fc2ڊ/Z@fbbH(ɩ!g R r3~R> -hƟv~7Sx9+am2=L~ү9[YhhÔ#k0͡E}Mh;ȷ:$Ze2NzkkEHF|xzra  (}#0G*-%*M)ãDS9KֱiG4f,Nҋ/5cge S:|ɞ珫zt zR?* 3w5j&ŴLAW㍪gU*Cb^҃r'3:r<ѧ ƹ-d{9hKrZ;&)]3C7%Jojr:Dc<-J5_4|`^8PV#i$Sqk(;%S# #u2p~v +4 @ tvÁڙ8jH|h8Iqmo`FV]>p|`z/Ӥ{:= Vq\oQu?5nKcSwocrt{=쵙dZWd;>[Y6\aѨV} m 9Ky'^>P}iǴPxDBxerA#S%Q3q6[Jg3m!?GSRƓOVTL 1lE{W ro*_bQyԑs5 =j 4ϵъJ#tXD‚R:Z͐= J٤Bͼ$R)3l%IձL~ݙvjEV[_Ӏ˨Fj<&o #1nm& F`\qr[>r!c2 C lH `>DTtl;g=,0|Hq_,bgE[DyNphg{CKAϕ.7'qYTIR"vܡEL2@W[E^}ZLKvcv`׮1f)5c(>RD:JJ:!H!"R$ij]| 7{?]iǮPwlnL*rܸEl__ָ:3;%P{enZ\>5[,|n7"(QI }(b~E9w.ZyWw c0eI$:$HOޗC;~b9Nǡ00A|d2a.bC13e(`j`.e@ pg,] wΘ .jASPL6HBԊG$7X ߞyWCR}iozU5ez!M Yt,dU_Ey7Yq=F@yEfE? σp 6蛣^鲣_$GGY,|.sIʯS\ki|σU<*핹mio4X9 ZIz#Rru^^nY%)[jCHbT#R͂?8=4\彚e_eͻt) EE6#C1+>2'Muf<i*_# A㤟c?3I2)1bt|}#xe3eTt C[wo\ewxhQt"sBvD5 %c-N>*`,||/xCE)pe\y$]0'sMܩ`J mB)51\'I1W{a | %KwI__6v[c YdKL1BUQFqWs=.Cg~L||Yѻ껬KIR;EIоKWSvqyҸ>iLƗQ-K_H1$ЦZxt:yj^h^o^i/bX]fU1р:C1ĜOb!;y>)l G@S܈DNh<6!`BLB1jv)0fXR5]/v&Ζ]{sɅ$z{!e9V2M'PXtD-"Ki #ōRs ƄTQ#2\*밊a,%V5S*#dGDM7{ jq헔;R1t)|Ƙg#D1H!}@)İ6Y(a O4YkaKEwF~7vGl ۺ#ֆٚ3m W󭱦Sy ʖG9vƨ4<0smC<0VcxC{DiQÍځzpͺZpdFq1"vaC K"% `E!aicqLa^iGjd>9&"wE%KɍU]Y-=y5l$"o.`n6S\ihr'Sc| /(#a<:3b !ÂT0C+Ù cav3xkOcԵV$VVl-Csn," Vו4Y&&b#8J3k͑^,X8\uzesaL}_x0> l ҄y3,[64rim]_σ͠i5n "׍oLo#^4) u|u?կ6vjݝlHy'TjqҰ"7}h0Y/;X^r+ln%9 ,%jXEx6Jk+́?ޗ_\#gO_= 3AC(I7I4C33,c0[|lct:;\REB֋H@T8HaΉQ\JLu1*aeqcʽ3ø)au]9.N$4Wl_BC1 I[>%n_?V/8v"A>^s\ȺXS /EN-1|9+B;/z SdX bb1'>Dm2( d֚{m]%v՟6qgB4!z>"<τ\+Zy~mZu*A bxKʐ:eSJ>{hA!i.Kg׷f!!e}Nγě3OV[55o:o[. : {c_T5TImYt9 qpҟ%{|g>N$iǓ_;ێz\΢3R{aЙ\,Voj"xsᘆaC|1pK-q:˨ZH|G:HXp5HSoH 3.B@9F?g7ZwT9n+IE$9&.8ޤ{($bobcMc^Շbqwa1gFÒQW(Z"vVVamb% SDJ)(ԭd:5*a[FYl`  _Ot2=U?}jA Vs\&c+rVl~\qYя$a,[Q:5\zWySx5_^σ(fJqe>߻ZY-$'K36m? 
^2ZJ⌴꒮ڊ!hZ̼a`X(hԊi&MC2{W ,ZK%h}M%Jm ZЌT0a8r)ISZ >8V+KVTu ЉzuzחG|}/o?9sL>:Gm68$PNzEΊ򚢩b[d|rȸ:rK+ϜmMgrQz8j+ [t6[fsfu63mD曯0̯LCu,0LRw `@GRiRnZfKObz@˯Oٻ6$WY<"` v{zi]F2&%*R"((QrۇXɬ"Ed䋒RKKfRV(B`Ϋ =FܩDsN:" @$\v8,T^UKb{2~\ A*=!ȈDrh:HCS'=p\sy:;?}䇹5k`z)niAW-S=te䞹2*Bw't-Cw&Go1/cFykB_^->޾zZ;l|89?Ohx::ɼY Oߍ@PALeqeѯ4 .^Xq;ſlO@¤ @u6;=#_yE+,VV#T0YK/q̈́|4=tdexec1Ez̈;>M0J3HEc>=I 8U.6U LWsQzTtJrي_K ddL/gOOrIFkRC]a*Wro{!&9Bрe\h〳῾^Nؽ'|gw7'o͟oy|_^;lWQ; Jg17Mir-BX*Gjf|:x/A9sr V(%c \0+f ])"Ƞ:Y<+hTRhL0FAhz1BdjZ7uFvN T\>ə`\?*I)(e!?~<[ǠRV2:ARg4FL:0KB8AZ!E;3~96MiY]ߞ-I(wCV=jul7}.f̿n/f˸]/Η/jo4_1ڢ2p'] mzQ@!og:kwUdѮ+jA\Mim[Qk>z*8gDU/lϹFһa/:/iw` zRa^#]Tō;(P:A/zX< zx}+*4`"uþ:z㧮?mHc:=+?s#FMKjh7ߖ8Vo'20s{;nBk^ Խ#j鞓p:_f-&DQ ^/+2q3mk,pr,6s1O !/r/ÿ1oߴ#92pv{ܐ +12Nt͒G<@q/xDYH3'm`:21}}? 7Y?ov0:ˎ4X#euLa̡FQJWV`G^16bp_pC{ƅ{okR9}ݟ4ܗ~Ҋi}8R2ǿkS'd2 LNk-% U,5n g9Mg+ٳzf(o+,Kke3ޫeIo,[p,{CeL* ̕+:CFmYTP}}f,*qj_,-m5g[ZބIM6%6$O$` q962PNC"2uPFkBT$^$e>o ll#xpJK; {kнAKmewN/:OJCǡC\m5_jq]4]@}WXMʚE@u_\QmbܱxAYN٨rA%NZ`( sHImF@&?Hnow$ ybșˬ=KZ8*EƜNA+,pTMNqb4 y<*gv]EbrJ3AXa֖IjFJ!%mI2ger[s6Zxb@eGMD D\J @`1[ iqT֚S.^oz? 9Hf}`<1oH@s)KP0"dou0@ޠV8;NZ ~O x6H["AGm%`X`ΓH?RhRϖAAq8^Rokb{I{IFlӄ?V1iQ,)1S^lx2@Lmp6ݣGv>KUK=ml+|g[؄!QP$"xW0/SRJ)}!δ.BPx'K0V}Vȶҡ{#:Aec ^"K=<. ID?Y$ &Ola?Vfᖀ:=qTiQ+'Ea ,W, Ю E"Þ^d062Z U$TL"NK:[ <paS:Y<0w@{22Z-4ׅ.b ˷{,B ^!&ۆ{CiPcբpǡUtk1:n k+nhRK]'MN2XOGs7~m9MF>яf>5P/MtY)cw4' pq*8JgշhExPg Oޙ'ŧ ¡:g2I$IZnxR9(~VG/&IJcԢOd 5YE֩FPʼ?DmB=2wvZz|T^okO>}~?= fH;gɼ\Oߍ|8*2#* ]J;-eSt 6H/zW+6JJ+pVU{U˱HY`>fv \2Y*qf+VYd{̃fY^!pfy'۵,'=jMIF1a>`.&Mddvj\e0 ,e: 4;o)Y?Fjdv]HZ)/(n_ɷO6u]u'wm={3ayHD^$4J(\nMq\YL{TzuLv"4&gj=5zoPN*Γw?Ud)٪q%xmPN9/Ic̝Tl%,{_W2`~iܵGvAbp.vVcjo~Q(^u">`xӢ[O֮q0_*QӛGZqîu>U:HrA䲕Űt~.Rۭcsyy;hCg7Wud?omr2'%22:Qx%YaYUN=2|պ~o3 zFPخxNYǮ-)~$oS";5qs2IH/ZƩJj[+s$+V[tz9)!K!uAge s 'o6BZZ壝3Xrx Vj/ 2$pZjS6z\$FQdOI=k],>Y0ԜJ&|J'{XMI1!Ku2X uf](TGlOMBw_KͲnG L3RQ/3~ F զ_Mk\4@95> uV(5 I&jeWi*e hLyֲP\R{ ]Sb#uV4 ƅ5 -cwXk.4׀˺. a ݨ VX4o:TZ{΂naUhU%`}5 eo ĤT)Ss19'b nHWL&3u4DfT @6$+ Ns42, x2&d]g0M De)ΦdRȠe &;.jPAޠȪ\Q?70c:YomB9A7= qn0L\)!bD*pPRH 3k'D1G90y+XW. pq$VZ!hަJq`(q lseԸ`Syuf,Eyd(_uV yj2WڎebE.5Aլy"q 39yA9cthwHW̺(NcDBmUsvvǥNҩ]ܱ]-^F$>Hp"P ; o d9@X8pi76[0J$W{HV*X(THPjz/3kyᜁݶO 컀:Y=9"CO6XV.צ/BKzv)}NAjz1΁6DD-10 zvP PL(۽%XRr*a3m>%(ZuIBwH ԁ25X`kVPPl5Nkhg6XX-;ݑl,4^JЁj]!]c~:XTԙ$@fj2RAJ+12@!~ЃBΧ 78*4\J, Ұ" njtM"ϲ]@rĪM`k>Zl`%]!uf18Mh% Pf%BJMtKTр{9BEx'oV@ Q"m W|ZR0 1dkJ`=A;w+ϻ Abhl,xvX-s)du@0u1vtrhѓԷic+q0m >;-,V Ӛ{)DzIId05AI;C(Ofbz5l|YiFI*]ÁvæDluPmk[jx\܈ءBD{=D+Ylw]Pz@ AjFQ-@OQ rw`-T߼Uİ%4v*' ('Mm2X4M57) w(^ +U C 2ݡPdQƈR܌"1acTu`\A3C&4B"$9Tr2P`Fomlj6W+L$󙅱c|NN ត:l@@r/g 9ub'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N vy@Q@ <''8p">'{Su!J+ :,8g@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b';^'t938,]u d+1O deڰkt頤d'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N vud|NN Hp@Z<RHPv}N ]4b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v}=NO֣h+kPr}- -a=PGW/m-ۇgj#qQF'#)V1!+נ}z;ejb)w״8#G7GǿcxPۋn~A'Ql\ IuXJ&gjr+|寥ۆZqijW /߿_3o=wF tl:UTRM{ҪzL9$TU=&r16oC|Ok,5Fn6۶3z~fOkJt`QM귶z%pwvswqp ?q;nMonomSJ'o+"ÅPwKt}rQWiеچ=IuYձ:Pzj?,dnvַ!A{q{߲7v}''$Lu royfHDXnJ]m~YûF[aݝ+ߊt9blrEҊRPAԥ-!Puyj_=u0{P^кl6n}n{mVdzAǟ"{qz\#^^Ln}~ҝ o7w3N|1!ᢴT0&)- `'S.kmw@:FzÛ랖Q.ʽrUwwqv\ug[.瘣ɛIK-j\^#t.%H]T2@wY77lި-LWˋe:?oM_soz߿.O^W6pg%;vuz%~;6ϻ7؀S?[->;rbߚ|]XR·_ )BM풤lH}t1jA֐2[aVd-͌8[8q>2ΖB;3ZϊhlČoiC9ۇ8cd==7 r'Fآ0GB3vkmz:;5 hZ1Š!+ӒZ2+bkZi :gvP`Swh;MlNN̈=[8;uOb>Ԟ-;nfv`otGk۩}GMɨސ*f`HuwQ8c0:JV(d0C:ߊnXSj* )nAdͧ43a<Ev~ lPD$9AlFCed ^5Er9dmp=8$E5%ڢfMLuW55b7I<qoh]nΦseK:n":LJY5b5b6vkڂ^fq3Ufzō}Zʴ/f @FSGT\vGCBEIE)zxx4g?c_}3C>< [D5/R[o|rhy }ӱ~F^zy-[h#Mՙpˣ8pWyB5@p:5O~$~6aah#=0GXlV`vY\jҬR$yfҺ|@Ʊ9XH%͂lb:C8aq^t]#! `'O)MUu]MOkwէa?M;#zr15>|FPaS3σ+T#)J$xhFS2b>K3'݁`X'? 
z M[6-5܎dK5>r|3*lqpU&~_V.; ԓɷt˝Ox={ #|^{&7Va%w$k3D ோɪ\K>PFOv-Dc65nad #n(Nɱ܁07k?Ѯm2S(lo!ܤ730&ۡo˜ruK:8 vY*'a+꽁 0k봫[JDH!]z &&cI!)p4rF1Ҁ?v\4z Kc5Yy/|PG$ ut#D.ј)Cq86K ZFW6v8DM$1\dMi ,PZ yj٨xLrZ̧v1zO|G_=C|Q&[ЫXS]er `ҦPxw{rT2wgf4o&P`D\w׊*!L ^»w=f\ɻwŻ%m99nikJQ NRZ be|앙. fɝVo^&}O%\21.| =M&N$Ոԕd.)8-2`]R|Tȫ]1ʌ|Q EU1D/{|l ܚ pIO"~ao׃NᛇNЌ.nT{Fu)yQ_C;lES~ 7H*K v(;&0> AQ~b*\ \.ڟ$u]uD*4=S굌$ȴf߁ ܧں]|JbW u1o% ~9OWgg;\)'$ٖ7%.xqSܕSg ?#.UXT׳Zjz.zx)~t) cɹݯ#9Jl#s=O͏cُ7'XlOwCoT/'>Z%R1р8C1T @^)l G@!+nD"T$0!S&1jvzY=2GNMGS:W1`ORc%v v1X%LGd-qb-H/Vho߲Rct\\j(SZc[ߍ<  8g`8k%l@ưKy)EF~Z{ZΣ.6Gct4,XPZy+['TjWr~1NƨL}Y,Qd VZARѯ;'X~CF;zRc'8 )mK}vaC K"% `E!aicRC!fH rVLDSJx-`lRَ*#wHu`S~ES%*f G.᢭c^  `h塜}B䌂~-Ĕ#"UG 4*-wHawJ:UDT!sgJg`  8L<#^S.DpCLDB,,xGI4 NoWF7أ-Ƕ#ߌjذPWZ ziLSϿ`e6GgsFT 3dX fcra93a,\o۠cZtC>:Hy`Q;XZ}u1 y2+6塯u p_^Q6[|+֝&Zϟ_GGQGioiUFUFRKcnڑG6N3[/k~ܶvj=l{JHKI)Y4C/aZ_(R'aQBƒE ,=z/?/?/?V 3)s 8|IDH8 D ± ѐ; 8&p}H0}xlЙAz6vvL3Z8&zS:&ľg}v} vsXKێ 'iԝ͔vz{_;ԯ/Dl:j2*y0XG=S-oF.JO^#F#$YnĸW bx2!PJ S:Cm,%(#HI@`gd.Yo܃hiwHo*@r!Dn=ZwW9J- LE$K/b8őan,A6JL )cE'wD^dR"Plj=UڴٍpViڔ^jS'5Ԇ)Oik򕪎 1jW)tm4ߘay"%/w ^x2# .B 5%B\PEAu3%[AdsU3k~>vD;F-.JC$%Y{s3uyb0.O{Op+}.1UEs7[3]GB>w~0:#x<7?c<"N8\+ꮚ+h)GYu` 1@u4_: mk::p3m`24` <0Kn4(`vAͨ ޔ\Q1~PLpj j 5'N^g˒7o+ 6g7o<CqCD20!]H; 6.( IYoӫ#R__Pó ߀9|j`4aϼ 8SZ# +hSHrS<13u唗RO;NˢzLi?M6Vhgr"ڣ,I])Ɉ ^WTRJ~O_)_rKQ5/ꓚ{&I9ɐ%#[*YɨE\4*gCxgI}>GZڇR48 abj&0Iù~Q{ǽÆ0ɐunG劆Jz(&sm*#=/ ِjߎ&}1UCWH%CvͣNWM]3Mx2,:Sc)iДpPQqPlO (9(/vAg'0C}& Pc~iJ^(X;Ny ?"ap)1VLIqZFeZ–`V5,3:Ύ$hB<#<1 U=6B,4Ef)$OrS4u d$<҂42(?|/J4H\s!gb!@;(-LBƤɥ^ *arcF~iT%kKL|ڜ:E(|\CHҚLonǞmw-Ͽ,l MzG] aEʆl 8b)컌)^F=eq[x4c#[~&( 8Ձl! ;3%9$^dQrl$Z@B5Q.:AQ:n@ZL7\ imꀟX]qds-QGlpg_WE''7g>̩2r#K#%$R7]߯PVAO beEEp+Fzѓ1I*[GedEڶV4Y$C)$]OTdK1=ŨbuoT"Mt6DžϑˏNWᗷwJ>}߯O߽}RsӶ)q)Nb迾iUgCx󡹆nl]z;kdKnw,QzB ⧫ϯf0zjVzԡ ٔdV഑/jm9tYkY!!~#D#torkdVWIpT}zaAդ$Sg,g0ps'B P[%Dہ[$p9sQEiq)5: (R^{e&eG]/۫;TnuY[tzםqݑB(roU?9^dq <*[&(AseB,U'4J@fVD|RN[K_]biMڧҭ@R+w.pYu6M'6DB-ME 1AxQu FƬӮ} j686AR7C5Tu\:@SD) U(*/| mE ) <ǜH:u"7ӅRǡ^K^lv!شCʎ7mv =Lg㇫HB@AApAc"B&MI&r@%o #i蔃"9P*FVG#PU!LDb,G=[d혃;g=>- hWq&YU-zEY>f}VH!QH@jE@ ҂^) i Kb'BF\ 66 n%] /FBD)6R(6cjIVA6RP9V F^I9E !!M9T]CΦs;Dg!ŹNQ Rl}X'QrjQRD' ůk'Ľ4:221A`{f70$zsk`fnϜDyRI9L$DJ\ES!&ZIAKA#S>(ub!HȑjPIEXfJ‘qr 1*-X;Φ#^Bs -% eXL%g1#ZjHEK))CH2FԳ@,~dL'Jh3U$wrA! HΨZA-?zR'ed9č? ``͠+˯Ua;O%z`c@9jT0j2zCv79'TiЉh$U!143d':&823b% z1w6ۙxfE$f9D3 w,a48;nFy͝dXsIn&E']Yjt7]Gwʍ6 j:6#QayTLl]{[,ulBM8*vu}\q.iQvSP\`] ȥ~3Yaɇx,o>O2;9UyF?7ՠem,|zE,iѤL.I'8@,jsuRlne Q8>*" ^./ ɘ?wYH{7/wp~ww^fHO]P>9K;/^,N0'zw]-Ȕ; $Rr] 2(ZdSjͲɔeJGuNVg\"vWy/oreL_l4hn ’8,Q3[ krr#N/֕2Ӧ*ฺwH?F餈3F%s^4_y?yqb/㈭27vjZ ),>IB֬tB B1a,˝@؋{(EE1aJ*SM{S.X~Kںr9n"O sN3t+ZIY瑠PΡO}(w6L/wm9<~\))JAO!~ƔL:FX!q8e^d,@:J<`TfPJBc&YfETՎFx:E rv--Xi#1H:Mgj.͹ \9ZZZSa?Ħ(ÿms]u->^Us_8.l=L= \{00uC;zt SU6F)D;MM.x뜡DB0*g95dN1Ll:;oM|J$He 9Ge b2iK"CCk#  v߂/9_!'jUNxwUC܂y4]-O.\@e8498 NopZ;Og6`%0PD_:/Y)Vq_>}\fymsg7l[femot[ \AqX_n_v[ #Pa>s;m|,NXrի?55$Oc0E=̶#+3rHZQv :@;tz2\= H+ITZY;v[:-$Nm:I9YuFvǧ_a[ϭ˦q;-.ܞpnb[3jݽmE7o|#CK-C.v͚wy_a٩ 5bs.D/ʕ1R1|ĤQHf m9m#+}YP~^g$g+j|dkH i (NlQp1uaj`.e bQ0pxaR[K\|H#lE N&xn9-T&Xz2J,jep9èQZK@wxp󌞠 γ0+AY\1Пt(dˎ>_$Y>|UYҌƓ?^edϲs@ERgƟg脢"G#`gŧWRX, r Ԛ pIS#0ΧpjU`2dt>|%mͨ|vFYy˜Ӭ/C=l݌ nG9 N7p*W$zY+Pn v"L(`,ܛ9C;q̰:>uD*43Kk 'H24'Yiݍt~{*?aOdi$?jҌ=9IJjs񫩄[=| leS q)MqC?+ۅ~￾dh~5+`V%+..KI+~DTYJXn%$\]^BlP|߰ =? _f]])f}1Đi a&ݠXTk –8 B *y-%/\xlB OƨU1kQR$kYۥ"gNJwOCjĿqI1F C7;,r#hy\I+o7M/p(1䭩3=jU RYU4c,TGREx+•DM7{ jU{UysX2&ȃ.Q RHmP` mb@`p2J88#9 SF~=@־G_4,lÃ=#izo\YRZ9? 
1<0smC sߗC W`T[# be}00+CʘWۭ:x & V\j_p6ke-e` 0GM,᩾jnDh:vF6x&y|4v+!n 4& !`O5UOy^`@@.N6ܥc$) V`%'V`T3AŸoɤ+&,'@\k]Mbf7͠ɺ+%/̶aJs(N Zmnt@G|Q{rZ=$k7f .&^=qqAEVT!Q]HȻgMv c|45ŏ@x@@Kp7ez1ըt6wg*t1PwEA/<; [\+`2_W`] 3InmTwiؠjvi{~d0&Ռ N36QE9E2fBA_-XNռӮeOD7,VϬZ-o>xn+kv6 ="Nt{#ŞcXr/XZ޺,2T;$vd {|ߎ;8!ZzxB7xi/3ΔH!8s.\T>F `]9%FԿSiZQz]<ϋpy5yk4ݿo,9D;YJIYk(i ".c l=s|t ́͢&FN"0\[,VEOt*d`(Ƅ(bdZ-3hQ /:UPc7Y^GZ&b@4:&e+yaQiK`@3 $@2>;VK=D(CLvaO-?CHZ0 [NH$y8JfrIؿhziAB R/yG5\jPx57^ seAeHi16 :01t JI!Fp6{N/w|Ll8v657qOq؏21^"4%GZl9uRtI*Vޟ.%q(&ƅ'ppPRz]"H_IrɓZ#"vtLs eSDz;RL]EM58셁ΫJ`Q85ia!zr9ל\wgtt^&Q݅8Ώc2L6 KpMLJ]}SVWꢓxЧTFs M(JMßWpV\gIOl銪lxr'^]^To[gkQL2|~1YowvA6,Œ[;7 8#+1~a2yo2ЇѸQ+ǣMar ,W%hE6t(r{ErZBʔ)`S*@{,yqg㙗EjiRM~co8l!Ƹ6rN`u>E!LJN!]pVEЬHӹNsQqٛQ\vݒ˾免\Ǟ;TKعЕsY5QwuI61aݭ|>S+ǟ 1'H8,$EIGzFS)hd0JoT/dԆ/yˍT6ta[׽KP琔s(.Q+LF FY m&ZRdA8#SS Nld'qγtd˕e(6-K"zɮXel8kv3r]m=Y?n|O;7guM ֻֿ&G(}4Emu:PamGןYwl]jfQ<_W>xwx\MfCY8_6o07?6ĬM.Y^Ť]MW#Cng5vOMBR+$uFu(E8=לtb/rG?Ed"`#Ua J(K/N5SdY3뒐>)"Tb &e4IW (z2PȚ)fWqi]]p͸JPFjT{ Wo?_՝erݟ -Y&r=[_^*یMQ֖)h GLGt" L$V2hRm)R AXy( r9h5$-=dQ5&}aj͆s;2UڱZIƱXcAp7ljQ\췉n盭>␧i|a~Z?sv m ;hcSVa3-iU.z'ؤfOIÊZP=@@AMe( KTط=eta̰B1b7t j7cQ4Fm3d޼ .SAyr׺ H#̲ .0&EU-6!omCfP"{QebI-)`a1׺b‡T*LuA܎S=8Dl&""5FDqBK i N^5uB]SͭJR`!Pտm&g}QIW I_řBM6T1' H#uufù?d^g3)9mc\.Nx>K-<>OE>RREjKU k@bLjQ'\|\O+8]c<<2w@.kOu_#J2cxQlNX-Y@!jSC`SN)rS;xݟAàV_az]S3 bn}~6/wxx.#{(;.WxuzgZ5"5$4;r7.1}VP9Ek~"į|NGGÂZYDԮ{~3''m!GN|uX%]X:uAҬG:xb1/ahdr?є%cY2 ʒ:3c"::rTR(L:/4PGR>mdvnІ׳_DkR۾EXq3qٗHNtҕ(ZdvA+} YX(`$5! BUw%C# dA cH A (JoKzy!1!w՚ed~Ŗ|<9]z7w3M3" 7xjY"D7>Ȕ%Fy4D(З\XBU8E ,'i2>Bτ< yݟ!K֐aV R"Ski `IʡM&UjEywQ]2r<}8#JVr+*ZE `C҆U@@7O jssbj01t۠dNa ^xht}12ƭ;[$E$ིFϔaم(dDŽS4)J?& 老4<@g(ze*%ID$c5|J5ٯM\%MM-hJKfL+qdR>;OIvzUu̩Z~VKg$-RH4؎':Y.VKY#n72<}TF{n?47,0eRNy/}Â2+)/jF{z rW{}J(wx?5ݞ~kt},_.iF(FC.NmmZ{?=c y~^/{nCO.H㮣i熎j\}=8IAط_laO.s~PZ]z¬׼_ou~7Bfxuj񾛂;96]\rmz]} ! C t#ru) >-KX͆;OmWtτ>n_1u/ܯNj"w)# a[}5aS gLÿ7gT $D(FjS +.EY &CT-Bi]s+j9·B,ZO۳oF=چD[W  W76bYR1ǮFtPRb1cc, m܁e:˂Z^}.X"WPTE!is63>bS'vX+TJd tt>A .Ȅ02\mɒȭKV5Ύ,kmM_zU{2N5B_u+5moS}ƻt[ˍ+Kt֗)Xਰph|d`0 DI.F-SYD>/Gx*Ư:Х匡ƭY| Uڊ_!)k. l-S[+7:8&ygX`;}>ł{LSZ? 
'T֝ό1%YYA% l1IFl*>B<9#3Fq O"05}iE2?sLP+"J13A*N2(a-ބaiH,E$Hu!Ģ,P5_YDXt&| 7/)cv\R7hY\ßkߔGx#Gr.{|t/C.m?r(ד:C?_ذ:үEnEg=-hQ<+?Z+c:PeBjѱENGa68: wc 4*յPA6wװ|x@cv2=,дO!%~XBMTȫ_jtzS%Vmׯ篮1XִGi-UAj}c%'<Ǜ&w<{8.~#o.'^g:}xZ&jed-M|z$LC; Ė -ff:76s]T޳f6Gp"j5г/mO;n%nmU[]V+*zNJC<,>}vW뺜*Jµ/]E..6.?ݏ7ן}|RoG9%uwtwM۟vo6k[5A]/|v=-nPkqZϬiaoo/YQW~ף'3+zGy6x6JKֲ/ŸNsoB#`@$/uRޚvgnbI.~<ɫ j4`ǔi"&MfM&$!IRfVBxҷ^pXמہ;Ұ@*Z$H>ڤ?:AW&4risP;d;콻=ynځ&\wuHhz(ytl,yu ΍2@ \2]RP@9L`2@n;<'^}.2@+2ڗPhBM_$BFB 4yʒN| J`&tHJC-iM>^0f@VK5Dl֝!b2.,ӵĊ?O[*IEIYXOH%LF<0afmD⠹4z̭,:̩2,8XeUL)RhHҠ7S@)R$)SS= <8QdA&FV+GzYs"#ingͺ^B ;J P+p$@sYzQ4 RE̹+eP*P`XrEJ@ :IHS$gkWBd(R`Z͠VO4[Y5IJꜬ6<%2t4.e^`01вZA;$G,d*bjD WFDրI*SPIr"+Uh瀭ZRcEm֝v/]0,eJpquPd}雨X0csٲ?̦΋L};|pE|bl%[?度Vy_L XDBbXp}}RD.2E붎lQԲ#ܝ\Tr\rʁ+,PHTkج;[vXN+ƶ QmGoY]f2]2;_ЅdqϏZu:lSHtAɯO)"C}!.@:7ǽds02ŹcA!5&FcLFVc!#P׻9ɂDٻ6$WN&{1k1~J|=' =zG)MM >H >>wW+N}(qET\{'J0F0 !SbSք$s 8whGBRGF&F'( Ddy*XӖk녥ď$ `'F(ɢ.EF.<9L.3)DH1}  nIK52S:gL:cR<7VjA͎:;b:TO/KeU?O٬{Wl2W%\>V*NIJg-ijǧ_=ӯ/J>ꞥw)T]V(_3xmԪp mSw}w{)֘~ӇFz% ٙ p+귇 {[tI9[Y(zxޫg(I#rX5weZtɬdj&]5t@q[\SP!W3"M1|~R5krQKcv*MϽlf4|Ɇo{݄G #}U5 5ԣG>:-cּFR߬+r+PƁ;=7S~ur{֋LZbNYB l3ۍ3YCFuݍ^J\ݺ o{`?oW~E{p*{{]؎+L)*tOjeZ0%Ɔܔ wH?Q |j6:[)^ſ]*M4J4tZGs\[`/cw>?LqZ q>~0a1r Ob}N&8,`^"d۫˟-^vn~O.='L ^,v@*W(oa WxkꭅON'[n/km󾝷-`k^ZGu?HK>>l^$lV%Mkq2C$҉`ŌXyV n]ŏ]Ǐ՝ŏt`RGA-:Ir_ $uD 4Ff%4lt.e,+]֥Hl,"+!d`$PO1&d_E:59CPWCElbx=d}.-4=o6瞠 cCF)BffnOf4x_+*|-εM'a-h<&} o;p_|9DMJoG * ^mk6Y*r}Zjf7_^1=[X}o4c_ݘd%8U*ɯ D|z|g i,f1A%}w_M/۶2yɖg1"*?olFUI 4riM]""(iJ; o^PJ7mMKKNP%3|РU۳c nN|2]sjŭ*X;X:u "`6B;slo6YnaFI~#.-k_/E5w.&|.u̖jGSn6[( VG<{(b]@1 ҳ@!Lj~lw >FsϬ<-ɼ:&u7M MpDdd*o'*O2@$Lk.g,?_8`xSr!}W{]l﷯?V)Fu Xv4biVъdנ[h⭓Em``:"Ҳ)OZ#[}^[$k R8'.HI(YhȚDMmItl*0lf|H(jHj(ZpkXAq*#K9Kg\, DȲL JdD+bMB8Ŵ,m#!*ŌǐՔ,x}`9GD'"dl9tԪ< [$BnU3d5[)cWP唩.%&!9FϘ*hː0XU9ج cV2L?#&"jUB(b} pr%>u7i~vB :YiL28aae^3&$p-%VII7DCF<d2n.sm+9 RD@0`PRB3 a uS-vB$QkKwhtayF e4 a[# &` :5\eb/5zHb3Z Yr T@c$ "2yjVnB*a{R-C;ĥݍRn d#`b(OmTzeRC (d 7a쵷@1C;bBA؄CMP B@LIl1p @gRvI?]) a-Xe6SUzQ@⬤@PnhTKB(a.a]`d0rVmKI+Oag/u r*<R͸l+akmȲEЧ ma E`|ٙ gLD"5GT"e[-TɦpGv}SuOM.-%u$ӶVlB[L"bqѳVUQc%Gii"j8(= RN5nʰkQۡJΘ @<=np}[[d6cVGq|1|,5hpD9#Ⱦwf>:WirB#5kq`<"Px #/Mf@}2YWҥ`ruA"X L͇ɠ<Π;m _~a=(9@i@2QӲB Q`r'iO,Q<{ ȋ ,/x7VTlh ^X:N~T}5Ww%,ܶMxYI N;>m9{^t9 (T&o%!jUW@ @v3 hcBUBGm` @;k1-PxW3K:vU 9bFPt;ohdr[nV R=K%bBR@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)z{[yk5mw=ח}RrwW5;@ "iOQr oG:_4&NGUVl< G}m؞]k:᳴r(gpq?pWR.߈y<>9ѵ+[]R>}c* Q\#I3ƐJ+%ه8$}Z&X9>HnGuoP̡U&.1i?V <,j Pa+E`2hjs'hof3gWplއ7x42=>y.N xy-^χ#.ǼVD[uV JBRiP,!d!K V9vD=fڙ0ō7tS8bq~o?/ki:|}7z-||x>"m,?9\"ӭC9;&FLp<˥pX Ǖ*#֧ !!J7Xt|4mkaϸ% Ī|{ǀo8Fmx&Co;;: غvMYn9-+Z2(˰_W1deڌRA>8@zEmIlOT_ো)"^~aþ2%H3b[aƗe*{  'fQlv1@jdEK_CS($G լue36pD/R/3XU7F\֘wiNўZ~^5o l9in//뗖 8;î?իeK~yo6׋ӫoY{`u>/ ˗[{鍊4 yvq#bhSN\uG8c[I+tZGx#}Ց)Y"EnȽf^KS Cȳ7TVVi,qmv+*/q֣Scu3g8ꏯ ̮pT9bi0&#M=ǔs2:@JS p'197AJ!3^-N;/Z6TQ9##L}ERx b*(تHmV7.O'Ojbyx{y:O|4;s7|vݞ+: .s[gGItԨj(uM%"G~ژtJI8J]Dz\^?0m rq6}>(ocX\׋Vw}wӲov́~K6$9cRT sM1dVǓ'd ̄} T;Έ?^xC n.ogCxzvŅ"YQEK @X6`EΉ/R2cƆ8w(C9:sZšIo>2 Wayc9vˉV.0{Чi\ilm)qGm~b/'4)l2nOü VlQ* Vo;44ER B:]CgnJc]Fv㸳bN[Fdn`<=GQ\6g|X񐥵psUE\L\UmҦ` x2dWCAv3~;fRK}w0Ӹ? 
'>;A>nn%vG#r.sfMYМ,K(سLdJHz'_8 TY.SoXNU6`[jnH$rZїO*UNhn]3Aؾcw3 ޠkTs~;_~61пX>}7I,#;ݍwBzelwVjB^p:lX ɖåa&W s-r<|Ё%&YDX+aFX9;º- J}$nI`/r\ f/r\Uᕲ+qCG͜ )`uZ.+F?t}\P:8+3YS1T('.3K=aIv"pX JgpV^ />` b6JZjMqɥNyydbyt]׬CITbeH}s}tR8%rdDcFKUʾ==sfU5}s 5Gͅڽ]^5d<13zsMfn.OӃN~s:[mgl_s˗GHa'7kN;O2n\Q ;4Q~ݏ>[_^_ء Tq*AQ0(^gj rNǽ?wֿӯ#YbիMʉ9k @kWdM x `VVp+mT-]T-ȉmkФuT\K6y+yl?|!Rg}.)0JZDV,_0Q]F8vT(& >For=nFMD͏|>ٳNL[Qso iQ[h&os ,mNm99e+qP\dCK!fz_-||_kwxy[i_{}#tڏui;yb*f/^e,V _g\9T^ "w-#AfvIpKHAv2k _ndI'8TS,ˢPcdXݬ S7rɟf)HM|VJ-!Cb!QF *HU8TgDFV -ѽ-I[!R|*/_ojp< pIT`Teo|hz6M:ˋӓKg7)1#D_:X9 Fe9^oU\lGoS|@0U( ^GŃmYQLUܛޜL_v|ncq?8 f´@qdX jꟐAtML6 5lD,%k Os)"D1 2e}[غn][o'9v]]|he- 6O(?4Mn.7)J.W9u]azO+yЬsOj9CM4n8BHNqiJB,q͏hˠ@BTHjYhTcS=vkv\gk";c+͜P`m&TFX* p؎rBRRN]g$@Ph˵uR /GzNGP#!F;vuN3%$&; .A7V%Qyϔͮ3&3~N8~(l䠺*$E(}qrw=&rT>w]U`44&r\k?a\T'͟GCB仓5Z'/'N2'''Es-~hJQNÁ_Y^\נY (bfl&pSrqeOzH?P;{OȺ?Ey#G&S[4/Ü5O2`!ыV+.W(r-~Q(stIU4S;4@6Zߍj5#01.)BbSc/f0WԹ1||{ca@hrn7Bڋ/χ&7Kf0 sdǬ"HSjPeI(㈱鄜^ʄSxh\G>C[.5Ģ:$ 2FHTw9|#hA{X[wˏSDPuT>F@ (Fg=b>,?6-f^1;W .'6 JVR+rPuq_ /ͪ?ͬ,û OQ{,{3y4Gy^ޢ v^y7t;s9#Zj\i 3OW$/,+ykI6SSc0S옄3:$b$EF9gϫZ0Q1F tMRJ"G0DhcI$).AYT~aPȯk_1׾᭯Cu m彰-#+mY XPJ ֗R^Rٹ4(D*-S Adpe+BȨ9cïzz%Ѭuh4OX NPG3"H@HvJq^p@;HAGRԄ!pU?pKLXUy$uz<;:TRCb(`HZ&TR8KK qUM4W RG0-` z/DΉ(rST:gD #NC@^[,\8?m,i;_t1yM:8BPB9NUAh8$2"t:AC[ `+=BcHAmf!D)gECD >uhtUY^W 3biy.%H) 2X)=%eR3yVieQGiW͐s3cs\y Zr9߷BxQΦa6hIsebPZ-Ea ed{>j e)Bmzń:H_=zК傞] w7V.e)!+A*aT:06p_*'Û4 e[h< TRxb-C%9MUƧK5vӥ *5sH +TB@>l!NJx0$hBTj;k_}pIFu6cAza[Ey{={%'Pws.2M'zE&z/2o)Vqr}_PoŤ.ju?J.SLǟU魏d+J n!SBrT*E0`fH񌧳ЃVky4?>OQHAuL4')X B wU bt͓:a˵(^=Qg9'2Y:H- ϓ$h(A2uhIS@Y٬?'ch2g] v1Eknv3TޟɎRJvg\KfJ|!J#S*@=:\,zQnhP$[%(@~^b_B"+\0RPdVCy+kTf|Qp?L]{L۶JĚ<瘵Z1}KKOZ$(]>hѴ~%ůqT?eY5[g%^*(O&ud_F 4 ..6 m)#uaLDز8$\5SnqXY3Hqu >zgCRQ)Nj-4kJviHuLDK8R`]V,^,-ڔ7#s L@,, --qՉԱ٨Aqtpj-E+mHe <@6> 8@],AS"i߷zK&Gm4ꪯurU.k6h~iO=Aڨ$5*HknR]|64kXP$]m#ӝDi\nju wP`bqv ~)?*TBe8>G*We6 fwƓaoTr3 {ˏY'@ʒA1g.'`@>0ʧ{IQN)93Z'G A/bLf9&mRVzT-+ݾ=MY=t|%:]!C,F%Kȳ@MgKC쌛~u&4Km5vqpIy-K= YE"\[,VEOt*d`(:eJ2Xڙl aZrם WveH^GZ$H)@TdNF03,*͸Jb,D! L7sXU\b}mȟDwLX!$px-s$< ^X)4L|-? 
'M%9HThs56TO Ra9_IRMOjnz4ׄ;6(Y VD0:RL݉ޞoNBWgJ`Qp!]ja!rt-lH]"Q0oR֥ q6Lw vi_N7ŏr@Z*ʼnۦ X`' B 2[0|ˉҊy 3?'o/&/ٚ(Dqc>uz۹.w.Hz?OE̗צ7n3jFԎn!ÔMkY$p } ;X2V=ٹ7,N`Q;*AGY7jZQdQr )D?,lדpS|~}ŊN5ԩ+g3r<_z:~o/1Q/ ua`8{O{M@=&pg^вajhC+䪧ŸZ4qJ/ڎIQz<_uF.%o Lj"LfVI+|6*3ܴdiY Q v XKB6dgIAn$MxI5IQĔ G-a 5;T4J1ՓP8={ !b̲ȈeS( V[騂WG-#Nq{̽A6F_η;_ʢg>˰}^%ՏR7}Ԁ3@>xQnA7uȴEFASnjSE<0nqDqF"@d2:#i}'ịUh4BkeF\:ڀmT t X1 Ʀs ^s<DZ8{Ş6!ώAVlRT[j$ene"695݀3GI>Lei]˻?/d*13tn*;8_mo )j:2cs4;9؊uDf^!} CJ۬gά?v.~?ZL"`Ge —C3Nƽ{g:3ҥ~L?mq|0ĺl]ebՑܝ87n] qN~1m"] zNP9  $i1徵Үb̽Wּ)2:{Q3yh;% (b#G$o:gJgC[:$pp*A4csb}-yǶ?Y!0ArY2ϐaFX*!I˕KYXfrHg~0]M(XS-fEp--SzZZZ(SmS_l3UnQn8w|VCgcT0&B~M,{fߺ7xwSur v]_xn?}l $˖ \pAEaݫ/o^gmdDݔHOG/ȺU˝a/Q\0żLK0-;wUxH6ٜasi-iڨj 1UԓH}PLB֋H@T8HaΉQ\JLFh:ō8`S!<"bK(F[ K)*9A M~(8_',5 YIxs,I.''=6nOmd#DV,y Ĵx* ']x|ZƑ5aS /EN v"7$bY!j[z 0PN#;ql&ȈGU &%Pk C(#T AHRVk;cƌL"^ˈiDk45[!- Ʀ@g.GL0R^xnLt9!jJ7J٠^II\QoݓϿC ulۋxN B'čYt4tJճסOI8vJx㢲Vt2.j?\s':ArAeLJ׸DSx)1nʷ]\ή^v|vY>k3.VPP$HW^ϲT(&?HJas|O꼹 pϴ6[i&aǥoC)ٹUj|TxWvt52{ ˆB*\㲺|gtSߎ&֒dMʏڲ_g[OrFI/Ԛ]^+8nv1ilD:Sl{NgmԱw*=ݴwz7X|#=lUO7[kކ!ow˯4ÊS>zCNiV˯n{6ir+Dkr{d&%w(r,aZKY("?Lyz>j"BI3d=A^ʘ>QM[y5ma٩ևbq.DM[L5,c I,F"F:D0\1SR05ql0٬w(joFI*G6^ԖiUUn.o)TkknX)C[υU~fSdO;53#"uHI= H Q2hQ* Fq_oq{O鰍pC{ɟ CtH_R;<^ ZN<0ґ#¥8c$dXtkc5H`6C.;l^}pL`WpPSlN_äΦ4[_ 2EP v-u,N\ }g_n| \%:ݷ+oV=^h`kG밁©7!qr5BdRTnDk- p̑:I>p =fFR~3}kmdv~W ݏ0J~ Iڒ4*y7?Z :ªWk !Eշ2l Volx!w)GDsRbI<5bg竀y6ޛ4\(B,A<"GS鞬l*%霣c:C4Q@}Y>Xzt=Ftr2$5je*&$dC8('+eldlzY Y{K_[mn^iiLqD6ȯ+Ew.!)+筭bPlUXYHFFo=ȭ9j6P c62u~iAq/FTBS,K܄iV`gmy:[yUT6-ːqR!oCOЃ" A%\:t%k`A)˼fYKth3D~VڴϏO.:6e%29uȁڸ .eW`LU+0ZA0uN֨aV$t2򐳉F圔 %v++ :UR@[߷P3(Tg[nrZܫPqYL//c-=')1+W}klv:bECHRPZ+<$'~y` -x)Mbmct:x=G= pA@ j1KH.i7vROiNuSx4,2*pqKwIϫU2WlX *LO3֓j摞VM(S ke'BpQD23e1K52:=&mM^֭/[YWk^ۺ]GrZ,9S<9=OLU6*ʒBr)ܻD3w"&uս)J.:M?~ڭ7+^KʨQUʒ'8\8-[L#g%UC\l5tƄ +Ug| A*'&TtBkA@pJ>!EPu ]͚䉳swm{Cۇ0/:a6a(ZoQB5JHKSJ1tBT<$ CRԳ_sY#bo߱+)3a Mr0N0eaD"2RQ:!E5vƣ ?-ݞP?kT:mơO%3Wޭ JH<*#O.c%.1("^#F#^CbHyjf2mE)2# rc< |;QPʤ"|qH8]&pP)+Qt:Y>iG|ҋ9y~ pq!ײZ_t"}F-5hyEU(Fۃ}kDEMHWf 8sN9A ,Q+tv>G5s<L"i9Hcf$KA`$މ)ndO)pkN4Oq= _U5qȚK=dDgLiKQf`xӊ'mS!()M"6^;=l }-Ent8gI):'D~K]("͠$B%Ƙq<0@6{mJ;K!J$EXL%B@NWIYe-% WWk3\ E&/K /)bK[$9M2L@{-;pN pE_,ytݮ9h jc$47O5̟dh[:XeX#ࢯ,I 0S8TN.NK6{-p99-m!s)A؊>}BΕ<3rx!S/cz.53޻ (fwi^͟l:h`+F2H齟iZ( P\~+TTeM҇R$I۵ЖJbM^KVfiy~jN)W7jwqh&M۴mQj Pf M@.Ӻ.k."8q8*5vmI1!V$_lֹI\{ gK_^u`suS 璤7܎8=V+핼̻yn5z}~@zfԊ؁_O?Mߤ39aĀ2У*hRT`ou?gL"q>2Q~=l i_P)Xyr*QD–sx8~ 7I)m E?-5T'w\I\Bl W ^_`7~њ^/^i|ǞOˆD4j>x/^ڞukߍ .&ONIA?ȷE5Zi#ψ:H/f"C}Hi0:)ZsfX)_o#JШBm-6 4Z VTk3{ zrPg6P#v-`Il~Д~`nxKI@m38GU20/2A&bHfugvTOAM P< !(L#($Rw3̀\y h z =HWz(+^FtBܛ_sۡ_jo4nq%P4T[h%eB֧'D#tJsY_]v!"Bfn\H )L..+NSZ)@EF&smgӹۻ% ]mhgP7]ݒWlϷvX+< ;֋vmם|$Si(Ѵdb\z( 3-(})PmE{LZ|yf+|=8zf+p=80Y=oJ,! !(+5a e2J8ڤґqHۭ}uSM9V-וZL1(ƣgY@*, !LQ&h|bGD"ܫ5xbߐaa[<:j둂. 
N΃ebc p׎, CU6yV*B\2(˱˸}QJ?EDY"옐sNaR,1M̑cDm!w!-c yYpE 8%u I3dy[:mxoG:$Ԃ 8s2zCt`F}fC1Y|--)5]mP &ӰrowmJ_2b ,2eL`(BdI#I[Cbw9簭lv]bk_;H/ًa^VF {㏵ MeI鱗ĐAfpY_䦳au|jyֻۖ1Jxbz~f{З@u%B'bQkS?ftٜYY^[}w((è(+8Prwce[g[@;Hk2ֵЧ`jl=hK̭u&A5^^ɲ@KIGZZظ$t[؂xQg$!8ɫF՞aRRQ*U+FL:&7t&ICFZ a%{o]l{bk"( &141㣏6iǯNGP9 ρƣ_:A)MdՀqa< !]{Ht]?^EIfLl eH.Re !ɀj.E20dڄ`b rp>?]E)Pl2=9|zEFZ'+ 'r@TғF `eQxSN^] K_#oC._"X\RP*U=1w9V;;xsNΌY mʴq'P2R*:%ɢS8%Ffd3ֶKV &y`3ٛhse}`&nugaNH]lf"(bJіEkDqJrvh$I:ZVQlK&dlTh*rdٟ@P 1gw}&{A%Pje(t.K/`FAJ#9xJ9@,{ANc7)}-?&!IDO ߥ* \E\A0#DoKՙN:kYi:' D6 kP0lPH"DƬˎj={;N^avFIv`EM-&cȚy/ZD$\.t'Oi2 IjCag|C;fU]:*FjDȅ E2lЂȈeeaH {&'QCvGtϪbMax|F[D5]dhH(%Je]Mg)}Ȭ9d2k4,dZuvA9!j0Smd]lڦ`#!YUL욕­6֞הխ0?0kFlic1* Mkm"z9E4:E8۫}mu-,!cædh+o 0CS+7M٪=i=<%ݸLFYt_bߚ) Fy0k0WNJA`.CsӁs^4> "`Q7*{ Ie)IN'0 "]+K($LqEym"% Dh fTIs 1z @RՙA5m)7kdfǶng)]B.hu3} .&E֒乖}HvR@J&.=KzUKd(>?YTLb69`brV1vOl2$Q^IgtVO0csіr΢ێ+3Mۓg|qu~3o<[Squ*w~”@E$t./5WgNсQ E"-A]611 g+YK\|T9Pv@Q5 %cg˹_2vgtex-`NbQ@f;N k1*hBDVl c;)'%:؝-~ǣ1EjwFرƃ>Xw;ThF+1Z2$MȿTRM0FclE;b0[g<%Td-m&dVHnjZt ҍ^B,ʲ , P@YD_Xt&| ow)cvt+hYy=,"XS/oԐLl8^pzL>ó~WճzuFw49놠'ߟ4>aNӳ6?vj:ҩ Nlͫn%9 sA[B;gmHʗ~;s([-Ur3 *z\~Mj~1ՏWGE\[Al֭񂛯|cʑ.cIjΓVzIdgB 5N#pAs"=OKٗʴ;A2:*Ų*+G)51&>ZJhLF~]κ^TGR"v՝sWf6Q0@rJE NTAXzD580y(ELV z{4̡DP@ ŵ cC/9|.M.5mzi8N[`4)oxpJ_|CSM7sݫ}ݣ_<'okEM=wG󜃳kHNn fĵ]3:lߌ*'}\@]eDF[$+!@*AZA*2(-TW/|ԥl\ȠoOsTcSu$r̖%ɯ6S=LV.zIgd%0hkc0R( t\,)ϸ{geg}"Z+qq7|3Z.}g7e|viIfxs ]Dyxԓ\-&7d̦p2@-׍24eIF且 lvKSڭ* d-Q _%,i?}ut3Z RJy-w1Je9ĄB/BK'2%C `0zk͖䁋Kw (zfJ-L`6);52H/[?'"䜔dK 㚂]9t Ò"o[½T~>_ ^)pry*/ i8+E E؍E(|u7 a|!9bdxgՎ !JEzf aa uw>s2!DeSfQ0{V M)AEn#*!HDePm8jEdYr呾JPwkVwS}hӝ^s=M٣8b?vDТ1iKf@l 9e֚<6c@0Iău̬d)R2 ;(NIV"by \0*rt%J7A =[Lg/E>̚ ,%fOƥb(iC*]W=LZȹo2XQ6ƻ- +F(pztOϮ`#<2@..e#jj@y`Q2VHɢǛR%zN7l1Ej~x!"e Ξ nj=fL'd:*7_}e_?P0;":ҙ>|pZiœ4y2O!\lz娻~6X~JRҢܧ-GX + JIһR΢`Dz RjEY4ɢычNFBsx,ʈϊ/e&LJ%|q`2;,gEC4gdv\-7fN0xC[FQ wyt)ug n]Ge57kuxSrQLy/. 5wh9Нe4ͅ23#϶3?ݝ[z<<:,owG g42&e,dgQsLLDe@YebcEqTi٭B`x;øHZ9cm"sǬXOUМ8c,hF*FH f V_ŷ_I VmZ_!JmkIN{im @8maQsy5Dx,6g(r6bJ6/tRRq˱o,eHɣL $+õggx*!KQ+BJ@e:8 %4bfm4\KO᯷ahD]8׍4BŌg?{;EY1H4] aRY9"irО$'ErG>K[1$$O`Fm(%ډH<(%XVh8k ЄR3G㒔g IfQ+pņ "\Y݅=_9]e(i2n|eP]Dßr..MEfzP˛;*Nq&eYBK<%FRdd`){4k4xE0ԛg)[Ms.(i B\f9.IpLf E9Kpmv> :yY +(ɴ'AN)Ld%pZ$? )FF!%댉Iee=6 ի{j @EPKϠK' 2)t%ϝCYvJIKs͝ fw9*PS^? v_ 9A%%32xb'HBXc U&EP*zV5'䞂M}]f: X4_ j`Q; 3E^A F=<2PZDc* Id"Ȳf/-kT '1@ i^vVȪϸAȃW 涤K`KXO1(ka)X5/u-R7{ ԔUU&]FyK]jL}zr6?Ma 6Qttv4ϛSu*fUv&FbNr$y&~alVcOXlYD'km#G,ٶ(ٛ i"KsŖZmI-Od!(H"@:'|+$gu܈28M@wSr))l60RD.| {:GDxgҫ1팜 ֵ"{`u8FaT; ƁFeXCB!|ɁK`SΌ,%ڢ\ܙc[yh;[|y6+T"~'ёekל/q nBޏ͍}ޏk;J&MSR-+>V>$[E4# :J&w<4얓8fvWH&̈́8b!3jS֔$wqBǙp9&6:`4P]BxU.*- 9S2Jp:=5$F"Vz#]s oIMBKHaIK5231JNa!.IRqLm--.;2SDH^RQ He5~p> G' ߾m?FNDF׃tP{Gz@팜 ]j]ʷ+,r%մE]S9L!{Ԩi>{vJ/_P|0_uZ~k9HBNj3tKZQ&˖貌 [Tnƨ+;Է"椕'5*0XL9CQI*(rPKEcc4Ln{{N rUh\_՘U`EWcJ߸PPIy:ɝdt2:c**+ +#,& VxϞ#>  JYѐ1Li.P9B Q{Ȃ+ Kfjd$6y1 >Km̱,|Ua"5Q!LM]wE@m\ʵ_8^V£;lWpѽq^Ҳ9: Ðn|׋A<$tӻ JN8p}4Fã|,5YJQ@j2q q!Aסʞ^Ql._H9*{ԟ{6C3*[k1krῶBG Bxxؒ읿7^ `ܽ?/ #@_`(U(m}Fp礎/^̧`?bkm3g8~Zn++jCF-x*+4VVK1-. 
U>F,Lb S; S2ң @4~V^r/:-mξ2'(i WYP u2a_%zh_}')KhU*SArJ15JVeOR\lJLB 0] F)MB ʱ丕Rg^߭Wŷ1exw"lmpN:u>ߞ18e^J #PPx$TH۟yF3/]M7- ?bo.#:<}/7871nV k9zI55MŠ_3v8&2S''"O`(b/0j;Cn{x#, ,,pz{qK<*~ldO,/"gi@&خ(K15i, iF goZ<<^V|9-IFHyFKכ܀|m"¯ s9-Mho@v^i?+^o4)VWtXqcB,+dxLRej.Gm\rV1Sje9Yb>2l@s!>}F9pJNST"w+r6Ř߁'|"޺88 ^(9qʼnz*7nUIs:M!|4$<:dXGV/DxfRb0uG`S{j%kW!+Ħ,0N*~tQy,vNx2w S;+T L팜J Ld:ƒ-qFMTQ:hl0cyhhtOZ[m\A>;J)% Jϋe-*Q7VA؎Y&mQNw!]N.O)!,i%IAJ 4Śz>rGQ|h_̦%t>kArvycN(}I%fھ37 9$,D5՟^tK[2z- 1bDuTˠZ0*!ir+g9TpBɍL}: Fͬ|îUeRᵔJ\Y`.6캻.D%.~fb֬vob[}A+l݌w9tfDf!e[mnMy4٠煖a<}w7؞xn;?QI6gvmM­pLnGnRHEfL'm襱_;ͤ4Ɉmn}4 /N,]ARz39@EaK mL!TY3Y*+K lCde9лQ顃Q%W*j3.m^TQ]Y~e_]dtʯ.!.P>yB-0+ߐM(ilӘUlȐaHv>h0H`q0i ׂYְʾFU1kĬ&׫sVؐMazRgƬ`H*M+*8ԁ34UTTQq1t=ο j];+-iYlkڷ}"EB{ʼnBiA9ksȌڔ5e2Iŝe`1P0Ǥ֝mthZIAr m=X"9w*SJ.E:v9\Ma*JVCƄ@|ҒlhH"! [r̥x^{;2sN 4ngJ_/P F(RrpyV4GR 5I /oH="HzORϟ};/|'KSV^AUYڑrp>K_S +ˮϰ[o[Iy;V*?8ݯ\{o u ~~ͷ~bjw4e4|FQ&=B 﨩>"l[dEí:` _v9KMhOje8=p[|ë_˜_u5},]4JWoǖ=Jazjon>;M֛$zod\h9gX<a-?MB Pjmv[ߝPOeD.45AxgKPИi`-jKS$qcgù3m>Q\S3F&KJ<6YHFI$E-5F4e7)~zFâc{_":$J#@J")]sI$).AYT!c{ctL] ] d7ڄ6FʑGmnk v_g]h'AT[gL-Ǖ +#AsZj{lAItk=:2*S0օӊ/!@ڋę@ $"$ TiT`gt"<-,^.8_E/Ɓ7_$"odztO8ttOH Kd.਀g] u.|օϺY>g%Ϻ87ϺY>g] uAZ;@ZZ)# uXϺY>g] }>BX u.|օϺY>g_xϺS u.|օϺY>g] u.|օϺY>g]"m_i 8,'>[q6t/)HkCܤjL:HKDʦn slX0GBQNJo jA@Zv^ЏNT7 ic.^/K6:( &T7s]8%f_47.םQG_ цsӋX{P'f!IAK9/dvG]1l,jT( G9Ii,c^GN<^i1S;d.v `G̥"GpGm`*Qfs3DbKv ;Ζ5/GWdBٳ*`w]p$1 \D,Dk>ф^R+5QxHvS_q63X [as\tܜ_PoĽ+ .N=9]qiw÷|NvkG^%vքZjdјe ,q(۔e]30]έѩC Vr>l$ꨌ,EݞYέi!6dB7t eZq1 $.5HM\꠸! )'^[-ۗɵl*0%OAhMo(d9CEe-ԗ7bpǖTo0#귙Y{[Y,z!(L!-ʢ^r%Q GFҨV_4h{)o\Y߹ ¢ڰg!;j#- jse&ˢNVheXPtI9hS %Ah|\3]u@jL-Cj|7|92^`pg$NJ,AVa^ȱcU\as &!-?ޅwӱ a>L%O`JuI };;j~!51wƔ,f gLP jDVqFl0mn_}0Rlq8%&G+(ȍo'Z]ڄh\x:,xv]3#J(N݇0 'r  ϧv' =I6$=nmƒ\WFpir> +#7|MnzV [=4gNRCr$S^ʑڣըMdŚF-Q2m?l/W?>Aq_~zz׿=e?):A57 Aʯ_5ebW~]κ]s#.z1[kUk炤7m?qz9tO[Z7[Vkf[A6*.j~xy#,hFQ!Z 8 1 ::O^:$vRKw'Yh>nMJ*%cU^FܐlQwZJ6 =΀XopXў۞'8J/I‹$4r.:{PNZo^0`$-:UNg2w3y.=y:*;Ùl7;E`Ӆxe2@jei7Z1.1` %!$f_x8"a \ݑ^ mh.8Em .ܚDԇɦ}<;AJ(ļBLlsuK|D-NDzch9Ń<8B(rCA&.CScgùTI)K8N- λ6,Ħ`}Ȅ.PIIA*mSQ](ڸz<X3",RBxpQ8biy>%H CEq k ˅WktLx4K3ٷ# ǭ|)y2H9ZK2LTSm:}(5 Ib*!P6)Q)FXBS,8+qF{+/ףkc}}hmH6 3_׮ [LE _=A=4ct]fDK8v:Q|"P'Me-WuR .nG;{vxIKvwEPj޽ !7x1B6-m=Ea7pj'jf39xυrT'p@)c3)-7J5%Q T68-=gtKw-.=0FSӽ N|6м.6\9U$Ej!pJI^;Y*GUtO(B$DO0?zx7W.&KxGm 4 "\R˗%@yH:r"XX,˗0  hc"ZL Q2S-D"X+ F9)ꝧS) Βԅ;Q->YG`3\~杻v܆l;ھyDx^tpC ox:Q#L;"QGrFbg'mK!1+#x# y`ӑMf'Xfy4?(HɐBAj @Ԉ@3)-˓ ,Ǘ$ b8r<|'2Y:J-qOQ29@R Huδ~ poܿ7ʺ-6 Зu+PԵg_[xAv0Y7]m/Qx L~ncLB[ Z P֠kx@aI&WX%9ۛ;J=ٲb8zp+'^pɁ@IQݓX~So%!T _e+~aorcԭÿNz^qOի4Al+9OF񅯖^ԧ oq(-<ҒGh'{pS&Ydq9ӣZP9f֊iLJMF>:78jGvx͟gy6[ZQSa3m/&dVOf.:| fxћ7/~> 蠡@M5 :e+' qm6êysBx^~_gGaTGy?V޵#Eȧ[ Q"N^np`g,ı;d)2(2VEWUz|9 Ay2S52iǧ/F?]LUXMp W8R?7/V3~JGҮs?k!eWy>ﭥ6֮-Ƥ\Wb #AVC'xy7@ײ|JWj,>̟rpɞN!>ٳw DP٧ >5n|~Iyx7Oϫ@ϖ*/[~!j ?czQ(&v vo ym|?6X;|,o+򯛔2Q{7ܥn~9} 瑒o] 1Fjg'LO)t>CӼpIKVu=IØ1X'L^񽃢6zyl=oy.|:0z}$хٲFmgoXWYr3vUQlbg-k~/x(`x`/杶4 *-NF'? +Ԝe݊g1m{đhP)R,&WLF`"C$";\kE"فEq[Ёx¸s " wŋhjI]$x#ٻ6I m>^Ы 6㍽Laqߤ6 \[R_tcWʴ~u+ӺVسL3o'oy\УG}J'o#h/+ÖYryu'ƿ37ۛףtx4*#{ϼաte:?OTG$T^O\~~TA}j"FtA9ہu֡ Ec~xPeIKTngSԮ1xMD Xorh}*l?>!DQz`B͇H|qUּ;5,Tyn֭ͪ\1ub~2rfޔ/ӷ'_Sb%Ha!&EJA`"yD ]H@%zIP4%f'%:Do+!PlTd/2g*Q܌zrP鳇Qe0xT{Ҳٽog{zNeu>튯<@KDo~]yJOжa@-1J@ummpB8&l("V =жIKh/”osG'j65XחoTz%ʢcM]a?%d"fLqFy tGm&WH%=_W΂KP f"rRŔ)GB2"JX=JShd263ZW"dF$TDNh&΂`Pș,bZxY3rv^l\uּگ KTBxH } J,@Qt$(4*BL9[QZPߘEJ!u`Q,"* l0RiB$(,Q)p4Zq~z~Jx;D%kmX%hXCbaLbҔhE>h֟6Y8A6㉽bnL˘eowQl:hL&/vE+s*BX)X>lET ODijoP+|Xv~ VL7 ;ssw"h-ѣPN(ceuqr~y(ьCj,#^̪byoH!]?d>!ץCRzCƟ,&(D[ eS; v:-4) 4tJAvs6.ZA DMd. 
+)vT^˪ !Е!q BO"z&Z>} C :Zey"Ys #D Ui*Z`qfX^0,2pʉV!Asbp Xw ٻpp%jRKVX:^ IDۻ ULI&U5|rH2&Y]5f)\i#\wM25"`ihY,I)aI< U9h"˥J10BuF@e֊`aQ0 a  n-Ybfp&AY"ωb%g>co,(ҖP ^Cdux6Bw]qFR0h+VAs8vLC~- 9 U-r-3R,Tq-{EߧX`3pv9J!ty _&Ri{|U'bgl8o/&߶~Tu:ؓ;S.ꬼ`߃#6YS<]3' (!G )-z'RGg]C}xpO`⩼5;nAhس{]i+qYaE+2\W6SJ(UOiqPW ]Q}C4'^-z|r>*RbI ub>CNQu/%%"9wЪ}xqHLka ϥ\}Hk iEhxa\zneks{ -E uɳޚd,b&^\Uf=u[=kq +3n櫧zE`OO;~2*Z Uӣ:k%_e/hwmy ռ=vHZN"u,TɃ j a|K?*:=ei̷^@gDp4:,vӠ!ow/VC^2tIjvɴjs>}.MekE xDusZ{ ɣ 2 a"$ɞc2wF:bZ}#.Ɩ JvkҿKM^^ۭ+Zr%rw1;oۇ3j>ki,xw]ѝ3p?0K(*P_v=4tXuQAY' cAZ8,rD 驜sXr5\2)8vVω:s֣bʩQnuTlwQt Q1'Q^}DZhph0-@s˻|`ѧvy\j!^}\SAf{G/hE'gצe<{zy̲Fkt9)oÛ[!y>uy%fUp.rhԚ)ҔTq0ʥͤ2ST a^c z-qծnqu[^+FI;ukV͘ .QY,C^@@E: pylU kvN-&UvAU::,0cdg(<ibaDSv> FR*qӥ;tͩ)ԌҊifKYJ ^cbDn2lNp;U,ͫ$b#DRZ<Tcͪiowvm-a?{ƑB_ K6Fgf!">{߯z"/qȞ!DzyTk7"1ZȻG8y{hԽ?歐IKy gT.6 'cX4*_/e40uGJy$[;5 +ЫaubeaG!R*Aq)yǭ=QbhD1a|YR٫{|]2? g 0;i:5*5?2S}ȘI_ÂRc(6ȣXJ}6< L EBc̈l 27(gk1gF>֮".zlmʄ v5:tM1Gr:2O갼$Uf1cK'JQ'|r. Y4 &$O9Hw'|>O3Bg2⪼>ÈRK0)06lKUxwvHhl% ׳UBÀjRL64_t `䔆rǪdT̏iNJ5N7\ L`t1ձndw3lݐ4eK9TE뷋O(u a@v$|>z"ST24vQvgä.bRi3^Ol1ёH* ,hjW.12EYՂfl8n~KFL 5vi&RD#״, JeV9PEuľcs/,6ŧV\ꐐW.12J ʧf`GaU>r6?g 4!89ry:/U3AijL~>µ5dLߣGecz!ؽY Ɖ&L (fPZQ~Uj!) RԔ2Ϲ(W #9*(2[wFh)\{j(O;9 E2ɨ (ȩfȄDSSjÙb8Y# okajdKX͸(bD[$$73) ӄs++1'޾붼Q*)ZyIf6} JhJ} |42֤4Etc粷PnZo?c [<)52J-">{) EK;wJfy5}[C=zU|M7dіjV2ZaE{tb֪\;DU_;+Iw_vn6V~V%чD|ςH3~W^_tqoܛZ~>߭N񂕦R_!y8)fH[= rg%ͬO{yNeITDT Đ/~\h)Vn>!t_}'+yC8OU5uSLTC#8/Κs696 vcY5%*ZbR#xТ1ĸ 5m s\+'XnWEx%rωsK"?TBxZnO*jFG&Rg@~}duXfKR댚<{gKYĞ KڧkKISg0|z5rBaպzj꫐PQL.aIs{d)PZ&n'1qu; BdzcR1G 9r4@kGhUeLgR&gij4TɆw<̆%SǞ|bT\*d' B~N:qJx)rb\!c;}j\ѢGC+ds8K> CV}vrZ"U.oW+ڪKV'Kd)fd=;κ@ :G:6S6_NT@L}75h<x7 vB]{+M`A1w GI X# *MF12S*؃jʼnTJgkЎ2o6)/EםiqA״-GH>3B]Ի zwL|aUhåb%+Å4rQL]Ş!me}Q|ľ|8@ݧ䥳y+BjQ0ޅ;Ei 7?ϛ5.-SY:̺41˹Q"/ GP8Kq,(agJ9bZsѽhx'C0n>΍F.ךb|<8. 36OٕrX?+WfKR0.yrnxŐt: 93$9(3!B×ήNd#0Uxy՚|h()RsvgC{UËC8דGgQ1nqF^\CK\,)ݖCX}XW?ۇauƽKoQ-F4h57tɵh[%^Pw+Aݭphu'b+VkT=R.Ӳ,zw![Ú6I GF3?%L3qpG29kk˼VJh"WvxwBKayRhnŠ /%dRozf9FheJ2Z- OȦN8kD,}!b7JY# @ ۖM bOPS#F)Si*2RA`ǭ! <3 c'#+e@q?'7iC$uB @)OZs[ ~UTFIAט,,)BVUTZXt²k@Ia>J^Tz$ʌ0g0Q&p-b Hu:5VB9 p$pJ*p1*$Yf`Q) ʈSӔ1 q&8pFs\ }1{ L=>3D$&3G_/o.YSnu7-N??{zhɮz` ϓ~Mb",;7ǰ_+ȋ"~ Z֙s⎧C''.2Bhio1]YZR~l8lu}홉/r#TCVQ:7m-!?LhD `>"9=,07 Oֻ`ҳLߢ1mCR~A; ÀhܚHi ڌ;&O`.2Xӟvvw+ώ܃jW.Q2UaVnUGnTwݎhK[jMkVnuH+Z &&.?_ebPsvJ_j^U W銃ח6N*H "Bq8gabOg=;egBC&å2vu3t%xrN XN:n!"UVI j3pw^ B(R-|Zb"Ś -.Cs*f? "sI$d%J/S|ȏ.桘 NAa4FȊ< XԁV,<^Z u-zc,>3,HJu Xm:HR3R:1Q)pST C>M`Ƞpaı#Z JHfj)C'ݾWd %CS좁#!9.l=g*Λ>&P/ _ɷL 'xe/JL~I/pEQ#F`\E<a b1J;^I_163d=J2&$?^tTIfH4*!:Vά֞464K'ѫnAT89h?1"%kv7sD!FJLQPJ?ޝUBc˘YC`(c&ʤx`it{?-OIDpE%" rtc>1(ê2^&leGf|P}nq8<ӘvqxMwMEg.8Qc$KX]a' $$u.[K{>U2dT;̤Z,``!i"l˔Cho1af^+f-xVcp ,RDg ŕ 26ҤP {3G$ccQ#` 1iG8SEiȚsk5ήbT:fI^R? Y,@:3t9>3!QJ2Hp.8r-NMXj:eaJaejC [iRNfLad2 p!ƴ> Z \ik' eH҃mkILַou7 h_\|lLߛxak҃{{r`C*\j/]h ǣOfLU2=*w`vy,MEˬS\OBPusFH['Y^f%~ +]QOl.FA8\szToxGRܠ٧d%tb-Pcʶț[ٶe͛ȓ*gj{ȑ_1eKఘއ-vp_cMc3.߯(v[jIfwx$,>U$b=c^v S8Ψ;g!)6G{=rFs5>_z],#tsMF,7rjYljf=[-F/iTHe[GjC+E啎[*LReX=biFYgscj&u-6![eãF3*d<cF7_k NP+8N ĸwG]>:fI CuaO$VcnwsbݜDy Zdl.vwvۑ`lgLa x | TCUI5-G*g "W=!D5ڶPګG_kk' uɔД`[=AB᝜~`TMoteNmZ@O7mpec-?]@{DlJwoKg_׽&t"B(xE1&/:ݣցxƑ9/rFdQKa L{ʱ"D[)(VH/p*u'Mr6x7'4=Ym ݔNj3>,7r+ifR rL6 oV٭ MMASxI<'9tLտSq߷f3˫wof6Mϒ;۴]yy9:K(Yڸ+ҭIɂDB4RKJgI^l=gfQȨ?xLw! X{ j6/KPfCэ} D{=u'CJNeZ]g覅Q;РN>\Eʨ8$ߠ '32|\ O9.Jůg_AXux"cBDqONVߋSh=DC @3::^R~ F|Dl߮?旦Ξ6YEۏGK|Z}F֯8J^`|(_]j Wl4:sGh][64 e :x$Vj_{\;EYcFX(x& *zpJW1~Weh`ԽO,L1i5R^ZJ(XjRmtղ{ qW0|{*.o_O]܀,H֓kNLk{Hǔt>\>,GXn8WfK[%]sJwG\_,'VG7`O;Hg+Tx#]'%mս`OSݛGv@vH5 hUhU1y"VJ m5 捰h d,SFvI`V=V/QƄS3f8 =hg>`"ÚxQs6i/oSAH=,Dj˜R+L`{ADr& F : eRhYWL TfH*屬 ucD?ZIǏ˸heV{xrQwW;"18sj k~zCH!zx!]hP`רZ\ڮ1',-ex&8FhYYl0= K1xp"HR! 
"$#=Ry1vV=Ŭz`ڵӳ|=PipS(%)8x,6XԧJ3ʃR .RL)SZv%B+U.7KZv{Ir憧Oc=bR,1aSᡡc1ES cK&V.Od]Z(OziG@v ml@Z*Q5FlxttE6pHk$Uթ8 vD1dsg\yKt&@Gi0vpq~T|D50)MH83GTP%=ъ0tpY@G/4Hi(-vZߪzq1Sl)-Jޕїdž^sdH7M\Td%`Jf.dTܝcdT)V/z]N#ԑdJ% %|JK&iiG*k86fp+OgGoIM |)9%KhStk# |9]|f>,7O~Nޝd) V >[gAz4(A)- ;t P))VkTt:'@{'KUF'$A|+}&%%^ZQ/a'P~)y !}_[?ճof;p˰⪿:^Z)J\ڔ_,Ǝ$0{">Kۯ,?`A cU8Z/iPұ`mǜ] e4Xx9$)E2e`.Y^g.SO<]I3d*Yxw=F{gIIA㋶cafu%Cr+: |V2 3(kPuXstL'[lZqQ ~ PIMJWs9-a Â;إ>cU9N8nRS .f\Ng3Y~mZK]b-1=]%[rC_8)/n$ԤpbR+@t.)@]ډ>CZ'zDT)XԻ 2A`hm)1*PX^ǶRƋ |RaX10}hB94I7JBO dhOg(G̎XԵn<{ B,:?[N~p3Ȁ/dTՈ#j5kzGƽIYg-SN53pJ,bm>C]J"irq7ő>j!!ӑVX1p6j/1'vW C|\xv6#D# ed:,1֞FN(Xa5tL #Ek\,xfݱ">ƿr +!4iH`a1BM,fQȠ (reY.Ҁon8wcAcM \#J S E xJK1}b-X#x\鰻uG)`ƌS\ Eʕ$VP49 ;~-S`T)C/2 $yKB+(q#YO'gGr/xg3d/Y$"K^]f& o5)KDY$ kK4*)-K%GN(u6rMcDW3^Z@?]*}<38kG9nPAB]sI)u_~=Kiͨni&H(Ϣ)!])ob_M37Uॹ]SVYEhtQnPB.5GHtaS4 *9j}r=&7??[>r&!8^d*LE4epw3sآ~C3)U #"Hl0B0T! sbQYf)(MZL>Jd!.'Or)FO=xq)L vThQ-ĉ#c\;0cZw/-`Zԉf_~8^aӊ R/yH1j^eWhF ѽ\I"-N-kz5e$\T jRj&h`O_f|ht# 9%YuIqXc mIȆ$Đ8RڡD1 "ٖE^Y%4 1oetBxː j M7b1'EB0LSB_F?`~;27B1ޭvF);,f4a:v=h~OӏL `'}أH/{쌣yuY d,z9[e>ϱsGQXwI3,,_~gO=brVIpd5L^*ǤASD$^`?޺IJzOumǯd!'Ď+kjT\9I < kR^DOxTnzxA@􂊢 Zj*/*Bku CJAL0rX+NVDXR O2:䥟P]EYj>?^"wqsz!3[Uܑe]#BqDkhP4 ChX b#c~a*@в>:ߴ+Ep;6!/=WJRx+ Khq[$0&4Bđ=!/;i,c]V: &#Ea˟R=}&գ>3e-pb>˘S.[ͪ+knV]5UD]!psJuZXPz',vi)fQ7[8eUTDԅ`>2F!g4cB4-ZN>2~b}Y,ֿ>,3yOnnW*Ǝ wLf7nh]fCGf%#gyAU];*O߷.O1@j/5Q8;}uBR>GFZХ/=S9͔3nSy*W0D#t^Oo|9)U0)e<l_#вnH2ĿZkC5 h3uzdpI+FA;)a%i5;=:՜n9NaꄠdN1^T& *dz4jغg}oFl5Բ_<,imc^'viw.1Xh`6Wo \k?ܦQu)nB.bٙ,LzPg\ IBح}+{wSVF:;?xs.3\Wp);n.h7_ e/nuH Q/ʜvշ;n2{-";Fv>xJnMߛ+ꐐ.^2ը¡vtAbPDt|()2{=iյvCB^z%vSVGc4}%] ޟ:WB^Y-PG~WJU4Ҹz?Ε1R'h*Ow%P.O\OZW 5)/Oe~B'ԱX&ȥ OWP ux*;chaVp.S2+z]BUjYDedm~&c;]P&)5v 0^doooiRerC0 mFvJvem7UcJK[dmؑp؞F!+D\֕l HW&@l{ .lL֘t=3]T&=NlJno6Y^4:A1#Gl=awcR%GME$NF"@Wv?&S'Gi8yr-yWYƇJpTfK WS$Ot%v.;MO]yGI$DrhC4P `[JbE `S_&Yf?Ƭ=K?/^ G>#2h3BB .ESF!bwirzHWR m;>-*  LkݪIS@A[&3|7\`t3*`lXC`q9 mA. r ?lJ/V5^I=to}Dps.?>x75"fT{U T~/ۃXhZ3ĽD-/e[R T (a*8Od7@İ]=^o?p7 OMrɚď\g3'bF.E'ܵ7l)Ѿ~pT`s0.AP>'_,yC-ȅ&vMy=t$U'}RZi 8;d\Q P\_zϖ꘬WW. 64X ic.t`")|YyZ<`8$F?fހ-0M*D|' Rp#LEг( U*0[RC G24A!HmcT^4z2)!aFL2DA)X*ڕ =\;m``6v , Y E;WuĝKGF-0|ZK|-aR{É}7ܐ*X>|[o>c#<":N ǟ\ |\d2&`$zIX~7lOwls$7dwD] ӻ/}B1(մ丶JZ, Znͷw޻7Eoil>c6W\+6 d6wLs>4d?x5?jL`V|];r#4qy&X| g\~<|.~q0,KKS6?jr=ԖY!GL*"kw$dw+(Ichr--80"$J89PcWDf FLöI. -x$L -EjJY J jdž HH֠d#U%HY mD)TXʕJ1/v.Sj綅΄h(@ibB o,QH}"6c"oab#=\K>6f)¹!-Av{*E- !%T ox\M k|i*A+rlJ(oy&WFNv9.(P(VY?N9\";a'd#bkfG` zo(<'ӸUۯ0{[95)Ds.Pc׊57kCvy ZQbՀ[,$^6 @< @1*Ǿ*h7;Ĝ;U0NLP ŵE*'7 ,kQ&)|GI`^0&GFJ`UR8ts KJ`^#TbѤ,<1#9EdRToh$ x(fP\ ]F$R#+e?4BaHFR ^Zʞ{)#IXjG6uySZ̄&a< =ʫӬh@Ø+ņt(CdL4k%{cHFɉHQ0\C-rZtpyzmUႭuN V .TPvD3CE"eduC"cp&С$(Fԇ3rSl٣#Τ 4-1F+C*s"ɥGIu6WhMɑVoLtOhof tN]6/2-CsI 8=! ]>m7zPc']߲Pef6&"֗#cBUY.%USRZZ .B#VFHN!NI7%cV+^jP٢eVթVYZK/:-:qp]w,;ru`m\"YWnL[,=hݽJڜ*E'q1ۼEl1nNXJԠ؞J$c)SC;~ve_ x;ãRl5)f4a:;;h~OϏ }cK0Y~ܒc(>:C[ I4*G}lz9&CqDq<ʲ[g`>ϱsJRfR݅ mH)͹ yu6 5᧌w]|}m h34<8sgqȾ_f/}+FA8ٛii%((m}oTz& 8LS-k`:} v7~HoLnM$Alj\aB z;\#l&`E 1[~`."=&QLZů?ܦPw"wr7ffGxLvre ewey#Hzox *x #wSZFr :;3T\ٻ7nd彼>C lFShF3>_4u(t8jU,V}5I3,:4Ŵ5&D-cvT&੊[~2vvCBȔ>45&]nNhNu%h?T} N0NL>N:WX^@5ψ8ޕ<8kHn"cy R-թ%NPGKX%߸꣥>Zc%~-ҳ88a- b'<8AS"nd16pLϯz[kd ߶\",ES4^IExK36]F̌?fKRǪ{ BmbƗ-&+YqG: KLaY~b֦YopB0$q)b8xE~_fl i$I~^*0:.  
8c xYf %FE5XPS: g,: zN$UĥQ N%'F02:A$|(/i(Ŝ\' 7Lzݜљ|s{WR?.>xOBn$97/~ax sYf Lcme8J S XbY:ʰg`M <@@8abbz0IA"LI޲0QcV6P18B_U D ;LXFLw',#k4R4(l447D"B-{K0Ӌɧl f1]`ϒ!7q#jmߝ92pjӼ>Xǡ͌n NJ7Ͱ:tVm~u.n&)v>R/G5{w,c-,8]ը]'XضFC,Ě]uy >,_LƃG:"]xkf3H܏ 4 N@)Ѿ?(&":ÁH!iJU>׺_%euO1H֠q]~n#4jWК5mZ3 (:VM3 L&~ۍwV&-$=ʬRyVKgsIތY<:37f Qm_5fdd9pk~L_'+xt80Uc~IY 9/,m&HA {¯bÞxazʰN`_> /2Θ41| m})p╝T3 off.Kn-eHa`S2F!w/ Q'tiFiFo_G҄Ջeo˶5ƜyR&1Lyx[H9aFpA`HPnq;`xuLF!;[:;PUoO[uFn0Dm=[Tn_K1>EՎt3+Gc2g5*NskOdpլp9 T\ y2%A0Ԓ8_ZED+JǤ@^J4⼞*ei"5iɵ ؖDR'AkWFJRQULeohsFB[)\1)Y(+g l1%üj2.є,f(( ``dr--]`x ŗN"d[&c~ӛߎD"^"0Ts3:[Lmu}$jݴx&2J,# VRbVCεY=] 0Mo eTsL,jh "G^NQE2}40w'IN.ӛlX%%i0(pTa:}y6rLߓ?ѫ&7#j飃{6 I?,>1zG^:+Fa֭tQQ0ancXFsF. 8s/){-g!Jf+W[RnZ>,OdԕGQr`᳡ kήѭ'<|V{N_wOxg)?\s\ h^t%ׯY[2#W983l?H(=qٻFr$W,vg}Awb13Btn mXILVJJYL1/{\ \E0"x^1?5Y'Q8ghc6ߌWOns!#LPbnz!Az%Z=-hǸNs @|ti@As @|t@S'jxw:D=EyoC&cA] џ:)V7vTqfUvTJ7j%iਢ"ӁwxΡ`/GrT._<= $'E;D!9y%$!9y;H$L.zzZϖW>{RߍE% N?5z9r}7(CVOEktt W_3$YH`Zň.!-6蜳3pVsԟڮYJ֬s(Jխ+•fj9r,VD+q0e{5̀lt,]pڃx *SN{FqsʣcVc%z,Ȋv&\cJSʕ >h" VHb;dRZ NqgGOdo}YcF/ X+p"7L+t) X!&=[ d钄SWۅ&tv McUN=BfZŚW5߲!?lr`hY-dMhv̍Z=ٔ iKxrr ^h1y2Yΐ {ugrŢX+=&ENvvXcJ㏮\HލWV;5qBR8MQi ;LZTnzt 1ԫ7T06RٍLJ4]Lu >lA}.DY?=W[Ǔr/?<}㒜sQ$fT??}~&nb[y ᣿A o}<# rgሜX&a:_,׋ ;\cH&)d0 E3&3Xi;}S3vL1 o鏏wwV!X}{8H?'[n ޺F)F"S%4ok_=+{Ajp N3Z dPo7< jsQQfc07X?8)n^}Q E2usyv/fS{~EA{)C0o[8viy+Wy7]:ganr>bTl9|$1$Bf`?.RI$AEa /gp*(g{X*Αv&wfs+ѥ&@Kk4Zssdv`NSa@H8O` l`,d@r(A_yE-|4_Aͱ(lo"L[&ⱄ~?dRŇYq{MafBVdßd'\*sU窪u }j%l1u{I+xg)p^Y!X0Sd܍l(@ jZKWҗgAIh<, =Cі1zo/մZz=P%48y=P%$SD e9N71osh}o/AT wGF&5UI{6yk5dwMGE1U.Czz9?Rl㙶3*mg ?Zy~gf1#M Ql[Oq5߶}xp5ʬ;XyLf!|8׵>fiiJcƖo$iE\IG w^BB:X1_~D53z"yP|֗EoEH',$ӽ+m14ШZw+s:]vTClU tP4& UA-U]S՗-5n@w{}[NOnܐ db;\n.u*:-W(D2BR`n}; CIy Wh C* V't;C {F3Qŵ4jDHIJR 1q\7Y)뺸(ϽnCJek8CT`4 }Fv| (ך'`("ǿs>stJ|l~PA f h# _MzqVzխ3׫:ѳ%|^Yc !c^*];u7we9MMQ][)P>كGX,w%eݯQ"m$eT.I8M PʦHruVYjAq{p򥩧2=M= T n +ޑ?.WQI>M= P10AC| *)1"b`bŚ_4ThQOÈ"Z%q7ec8N; T.GcReN&5Y,FK9 T`u`y=%ϔ4{;pxÉ0ԨH:Iш"FWCUK0jQR)t(h|\FT#KQ0eYީEkj.eRlNVIGְ x7Z TeA{PnksCĜъ~:u@AvtWwwpT_&GХǂwuX߾幟INmqz7u#/X]nl1OR]; SKJF]Zrz 'f3,O(GX/ag?oŁ 4\{;cl 7B:iVD8 n2HnHZ+`rXrcG9syTLJ\ϷoO}чIqZq1a_>΋UL_͙Yn^;yOk<ܤQFnv'ޓ.tv'RIܗП;/ 1x!6'zºYgy-`BI ')"T|DӵNoހAw:KNO G %1p=)[P٬BKz`Ns:"̌F&9RyNxs*<ግU)pesk4 +!t0A5c5RZTpä؞%WtFsEYGL۶48`~^m9mL5ub*lS5`{Nr)1_< 襘ygZѺ7VJzyÔ5 qIeV3 `ʩ٠nq5ÑiFe{4[ӻl#nj>}c7)N8|Mxgt̼c^S3Z%fq7Qx!D=FT%CE)N8|M}8'Dj"zzw2Ϩ+iW$ec'C>ݣ%Ci gVcc|KS]G)CֈҔ+TT.1M% >PoΣRhejE nO|3Ύ$¾ѦL#_šEGltZHQ)~%Vb*4sHn(9v!K!9!4Yc wj 89#&H9#Z2ii)HKcZ8DZ/J3LE22#R3"J[aA rAE{(32_"}ФEYV?ajUIy:msc4fr=7O5M灀k0!aY@L' [`0?\\HQ#CBK{HU ϘTI05ciZ{V}*E(i&ѫGzdV5$&m9vry $8q :)GR+Ǐ`"Vb,i+ Zhrn11-ڱbdq2vke-۪HJXW9 /Bv({ƕE@),!0M+Zٻ6ndWX|9gOe/ÖlmUNI)\mխHo4f( $$f;D|ht0 Sb4I4")4 Vyqާ*J&BdSNa`R`%5u0==^lVÐE6yWOOҧ2n*|۹ Bp _~E.?bTNɢh!~E^~x1%3`;&v |\㧧bINQ- (ewqf1h~hOIJۣ" ꟟-3@ L+ڣ sLo}QIŅu׸5]n}譾=;bN\3GjP(I}:e*B"re`9<)PFI kP`iHQMs*>B&#}ȇrƉOy".2 $p8X:PNq6B"ƚ;`;M=0 J5*V>t=eʵ=8v̇vI9Y!JQ,7#iyVPú"OrEʥ46<Вc;)4s8k<h@d%.Ah!7cwhay{d%OM2[.;;QŲo2GsQ2 ՝AbJhk1HoX:/~X Rޠ|x P!"MjpئMlp9?'cȐ.-{Ӿ=aRp5$;IāRGRXno֡0Rha) jIK&$ҽ"ys 3yQ7xy֣ c=muX%m}ٷT %B1nySm(:t"Ku*8T@WU=<(UǯkSxxkt]=* 5N_"Cx;;90!c"!D3%I.$r# & W&y^3` _21*m^cTO( ؒ1tV? 
ʼ/55ab"k ] Ձx‚ujNf+>x :Nwڳ hjº׽( BP,&^9 6Sh% 3nemz/>,mԷ }gm䫝^kBI31el{2˧E \5;!y>5Dx|~1?&fۋ?ߛi"\ߥXH}|o_e9-)~f@*fNcBqi{ݳԬ~d*Br5\in*ӟavq["Bߘ(T%c*?$~T0UK|c8gb}Gc 2۷m8ZUC$ W}%99IϘh4)3R0oʺteі*#'C`'tɡd',!S>3 5<w;ԧ,1XU'\>aY.gJKGj/ iVT#8eh=dBVŽ ˄Se+{6EEnpl*+ A<]R/V".Rчu7RB[D?Kv?1}Oˡ9 |͋?#aڙ`Bf'˄` EpےpG.gxT:TѮFJJB3z"SÈa܂'m=.8RL;Xvf"tkőm3 }%?wK&}:f%~W?&M 5-}MqdgqXf9jF ^%p,3‘ȩfD*PRJ5YQljJD4q1U.y{P.f< ɕI?-K7|_t>Y%j3Ķx*u)NBI_\5S9S<>Ч!SZI3,2dF(b)\42LŌM'e>h&j8#h5L'/ f/#ipzSϷG}{A6oy&l,d]|8ce p4tT7MR83Jh})za`u2|f8}` )&YFJSx 0T`T*+mrK΄ah c-yR؝L1U-QgX]{M+9!ƕgB0iƮFbr' tTH][V5JX9,GمQXki99-D'\ьG?!NTTru^3Bڹv]n, w",x?~*LՃCQnJoV~cb<҉oVn>xŵRe[DGF/͜ ˷M'w`JfãIsRAӂ!bK,Y"F{΃%(r3ea;+jq'lZ4Ғ ㊤!B02WwB:C\ ub6timMPp'̆ģr,=\gLH40lڝgH#-NmaNR}F+ghwz34Cw|}w -\ZG Oz},,v ҷc@P(z(hP61ghwl3j`"lڥs֐@!Ikw+f*\v}ZM9#*ZY4: *ިpQFE-U;.OO{CNpyǠ!M2vߙpjwжx6WdVl,SN=t>űZGKwo:lnƳ}}_|N|Lb;9FEM<ٕdM}s=z:=B,[6ɿ1n+]I,0D,OT8\I&>ȨspZ\^TjaPJ"ѝRsQNroAT UKY'/EhBC24=6INXZQ/[I;_OW!ҝ,rGe K4,(|OYBcjPI6$',z,(Qպ|{`"ɭ`6n{_PmkBm\}Wy@Svz-D§'Nt凞[)%|z qM !\EtJX7 }1p Zuz۝72UY7>uh+WQ֩P&bCn;XcAbwu9ZUK|LܹSb/LM>bc0 !fM'qBЙ 'h&'qBЙ4?8AK+gk8!LMO{8"Hq' SMxc)O.NHYw8aBu c0 ffq̰1N84ح[*/BRj,864!ޒ\2{g5R2{Q%Ǻ1%"1Nw}Ԁʔ8//k+TW<1C)\Njoo^$C #KlPuy\-Ǣ ġ`㵗} 8 QqÂhs.E`Gc@ R/$}b)l1gHq# 6tsyPL-P^ϸĢkؠ.)]S\^puba*a!BtklU8̪@ǁO96ڼc''kۡZwpVD-2,dٔlW?ݗ) Ú1gJ ~TYL3%3%ʞ-֊\&h*;n`֌=⾡QX۱-Sr?DC.q;OE[ h% jw>wJ`ՊOc;lR$|k5-%i/; ɉ QZ>^t 2g`UF#qR>5 ЇKέ ?#-*3M%4I08z {V {Lm`]egH#eީ oO,$u?I9_>c\ş~ܧ>yj \)(^ J(Y#dB1(n3)H" $dINzGq>!413hS&fJT`.IbXs(ae0s)$"4NP ֜"d*3)|͢*|z6)+ɗp 3Dņ沙⫧'Siw OxsWh<+>.=^L "bex?_y;7r%noo?^LS<+ t^c6Z,Y|r y$Tk{;L:ccIɿesk+U$nncPu>]Yo#G+_f z{bBJZ}"IJ*R:X8*#Ȍ+#"_ 5k31  ௏H@g'@u)xM{sUMĎWc,,,6|7w־LZ2iˤ/Z{nb3 nn?ͿO2oy; eYTQ <.l40^Ἦbxܝ]r賴frp{Xca;S0uʂjR,/O> &3'AȂml`dQbke. .ymI2r@A<9A"l G'BZ"-WPv\hiMX5,X4bK) ,B{ ;yX*BK"$ -1RXS:u"9l>ű&:y)B \q~܂(w^s J;*F V&1VQjZ $ QD|D;t{$ls{= .`)y(dę@\ϫ=㆗RI!3W(G%"z-R<8& ^ lc ˂91@3𳽦3#Bi1%e-X$Q&́ DJ't`N $wz2 PB+mhqFVՆ׋!.;j^ +V(Djliy &I|rR KXX ,-XXT$E2KX:k}W,cnE/)^c+&B)JH;L2g?p@ t$&7_LJgY>ՑSxMď^_9=_?WB](ks؜TW&KF^MlVbN&FFPQi2" F(S`TH*&([QÄx>:U`~wK]Odhd, TY8Zmy57 2xR& *L y1}Z eVVύ՝Y Uxv\R]lM0.~,ߑjMӽj.mfFLQfMkE/U| ǖ3V0ta :уq^l1_e0)LL$N.e`$7&}k/~lcHu^ЙǶ6sk~圣H;>٦n$"ԓlMiN=;mh$ݵd44.()\f_Œj$'72'2}JZ%0מ]iq#N]擄;l` gpI&X3,pS3s B2B&ۨ{"#* !ϙB Z0X%-.:$T|NV{^̖^yD(GmuaD4DF4fTm{^Z2iˤ5.ָ,krvC=S,S]TuhV 3+cR^wZ>h梦# m'4̱+c"CvWN"1Fə1ӞXJHK͞iJ&[uw[V7`m%܆1gHJTH#=N#.WyS@=T` ji$㆗2M3${/9ثݦbAXi@=Ķ)wK$eT醛OOa&T{]HQE |9 .;bTgCW/iαzU|1+rPr/<>>CUVۋWm7RZg~3EVv3ӝq3M5cQKRɹsV~6k>'|.j+}`Ξ͵'l_B. ;]ng_ 4ͧ 9B>Wஞ- ~l3w;X}bEjN)dpq {_YR#yumPg^bS@G%Z9/QPZhYZY7/Gs1\rfkjBUxrȍ(|5V{w|Q3~5Ou7q}+EXT<ݸK =B9G^ʈQHhf]Ґ0g\3_?Y.~Q* ,{qp"~|_]%o:IÅ_gl.=Хɽ<ַ~@`@`H yP{q,&~> ޼HŒܶ=:MtU2P ]E$tš]' TeFrYCe 32ݞSNb: "Oj " 쨽Tk;ӢjJrq!4$-La1DFQ`C׈qi@TEn|XyEg4F2J2^"*0 rQiJfSʭÎР*%;JBۄTVۻjL+Űyը,jDk E&bDTuiF6G9[RA9LxDz]̳6$Ld*l_ous{;|,V~#/5vZ"z5k.K-lr9H$xU>)Q87޴3D4-wXptCCj[*̩%%ITk+C<+`6B Wp&{b^Īȓ5} HY}YYzl<|d&8DQUvOIJeg"5ڧq{PS[46zUUEk-Q׈Ƶ$=u,;/~BE ty{KbV}fm`X'$/X0FA 'k^}'kܗ&eoW) MzL4ct.ӻ?LZhl7B IZnvHhް>~P }8f*D +85϶*s6r^}m ȔaN$\ξ~|X0J#!b; Za f .4̧sSHb+L(+r^ĐRۭ` [7mt)S0{}'{UUlڮb{,zRV}&vA13{Imҁ3vQ="Tjw"%rkPK5 ;@ JYFz&;dy*&O \YŌ-6`Do A4M=m9/ێs!x,G=]u c.hޛ̅ 龩Wn'5>\J/C?YrfG"jR5)e%h`?)eGKs9vS|>!N0+i- JH^cdr|'7V/'T-. v@c}^+4uY5>_+6XgR3|p<"1ē8GZ`BZМ$ Hly-uuRhR\tCOfu.(%/~֟cu,E%A%\#P9 FIkI gGpt5˜IZhadPE$W J.g2=+^a_}Bu'%>|ȹuKR8$,ڰ|!)[8RHU7ӡ* ҍK8zmƌGfZJ'?8i˵>i \P)q;oek@V Exb'7x9Z?y{RB['f\ Nv& RT~LkPO`/8̏VWZc*Y*)j>ϧɕ[uWL_~8/a2{b_1{b?6̌Ap|X8eϩHdw2X/967Vhq $YO&`K8vF{RtxL &Z_ 5)3L}W^|ׂHwܧ]9%Y4:^]xa: Vl'P(~ /Se*,/7F`A0!AX&,1! 
cF֚D`9E* )kb,a9|ROYRp*̈́!)1Dqꘕ' #&2.dgip'vOG Ƅ.뭕v 'm{4eTK*iX%d=ZOx?Re%B\&JbD"3Ϩ?0cX4LZD0$X)b?Zf]R.-J RB?(Ji u\jza t<(N(Seөp^lJaY _Q8Uȵ!X(ae̸:㨏a(%FYMT%)’M@:1f2EUJ@zTt0뫠zɋ W}'Y"X36IBP"BqG1Ưs|3@)`W~;iYx1!wd"p1L q: f%e J4)Ǡ0KM1Z $uGkRbl֏!($f,KA4qX)JȁK(pM `c;]BzE:obÐ`L>iF`(/D&y ak/Y9>oSBYY==h\S# x+HN7zEE7s\&s "ƈ݊C/o.`/+N{.S<z),?̃V`x{hiգ5#xw t`IvQoC_ű~܌J҅e KN"Ja`¦(R& `–l3#wVaC½'P^@Uey2^zA``XLJVieK8VTkE?X"P% ]XC6槛ZpULLY@TRT脱,/Z%`=_`-(f ֫j'NEP¤w@a#&"Z7qg)HJe&e"X,J ˑTkcb<+b+e#Ӏ uzIf`Fip%Na/4H8IEiw(Njs%u+XY kR U4U) 9$Ui@p,A LxDW/Jx3(lf;"_]ĜDm@2H]VKv{ XZYab (KUV ] 1&f$x:&K, ;)m6.+eD)`J4,0wO9`M6ZB>inJ#vLC@?p~|jǽr:g_Ի|NWE@LR W{k8: <:**kF&0LNuʸ39؂BɞoԍV +kD6D;Vu"HW1DN*KN[&V>.9c+aM#\_*|FEM}RHS-" ss ?>GȳVT0,VTcЯ ,C~]j>ԕܮj#9/(1J݋B`gU"ߵ(i"b֎dUH2$& `ZTI >#z)+1B|RĞVdNxf>S's$g3pYi+ $5 /eEREȎbӞ\), +e:JrJ59`'ś(05uV`)h"g ST'9es )D/gQjm"έ7;Ru#L`ם`!S;p2U%r .1MD(f[dh/m=x)LȾ@qӰK]0SaЏ_l*0 (Cruum o)r]ܭ-ll}gU+>m?#>Au#]"R/>  nvejx=!Q)1UUɰ/mX\G>w >ߵ%DEVkߎb0u`H:q5c ry +L [2D?"CWjjz([X3q_ݗU}\y1VN+ﰇxOGwg*Px?ٯ(V #Z?e끟bR1v9D'bH*R9UonQ7'v xw@C,Ԓm!8- ӻ-P$8wMqC>0rYĨF{o 9)9$|fDc9nˠƠ'EQ,clly1 U}5`ix_s>`%緯T#[Mx .Gg`=B/-4|e:NN5Cj1k9L FdNكDˁk=EdWjFhIr0F)ZȞRMن&KH'zu_;$q4;Ee#\0ZǕ5Ri]A± ȔsO RN;,ؘ p>&e4^hh66H 5! p qȒa$uS4sRS3rH׳fn%]3p!2ҵ{uwWnX}=\>nتWN7l\N5W=p_!eC\rnlM&!Xw?[H;h;uLTPƘ>]$l[(~1vz(hzܵW3p$jCNba@O\wFd.X#:}}bAM7{)چqrg"0l>g f'㡆nKMo>zͻ-6Hi`FəhF1;3_Ri hy*S"9"vr>*Z`V. xS:u|oNJ0bM>>)srV-3 e&7RvT1."]{t~tLP)KywkӕG荏/GAg|>j|v7%>?yr:hA^bh@O5&tBT﮿ot>\ a_x["դDfQ -*̘e)"ch5cKĪ1, u<9L<ďȳ׼O.Y ȌZs≔wz-@GK@w[?=RcMbUV%mfrnDr}hZ[)Xf>?3;lCl"۬O۰~zΏׅ2&x1ك L+tL x̌Q±G@= Fw~hytNB?L-+ ҒE4Jpd [,!;FvBO;zzn,ڭ hLϭ/|+AKDPS$˞'b:'KFL3fP>E)fϡ_;*Ħo޺;[e?V"e1uJZx!MEwp>/'G||ˇ 2}f!C.H?rR  cq.Œ&\n̨F<o#JۋSoUAL$rEwf}UirKB5c 3(g|aa/t-^W&òI^Ax-hk8G1ZAl@4Fu~qZ[޻~J6smWƧ]_]{εO{4sJxY?{Wţ >fvu곙&VCnzsUif_' ] >ns c簠a _Qs=ٗ[0'6'vqY^T&~QM |:_) UBRĥZ q-әL >G5ף=?ݾsxu;Ѫ{Cl_߬N/?S,zXB3 Ji}878>}iI Si"u\Jf)U8'b,) !ׂ5M+zQxu3<d(BklP# XY$@3Dln\/ӼeR!mQUw?!Un1UR c nx %~`+*ۄT1`'Q&*a,%U&(N"Gv(1N-i_0/_q7x[D~ {z({,\`KOo߽9=}\?(=_ 2zi."b|;>̢bRoo,O83Bu[X,;J??*@3]}ė >Cޕq$0ik;;/cyoG5Y$YŢ/ZΌ"*2#3">fQk8v j8]`6J*Q l5 l2Ԉ\PJXG {b-H92_1sDczć2$ A;\](7 sF4e j~MH  wRFRFD 'tJґ]mѽOnMv6\/ si}-7:I22ְWnJam:` @H ݧS3\kl;)h1([U`*w3y&'Fؚ<3@+3!VJOM&Y4SV=ZϠ}Oz\hNƫ:eyG_eY/ŨUQ-[]2l*a&+9@iϓWіԖ¹KF]zPg5tPLT{6Vq001TzoVhԹD NM=nMv T)=/-|+o7%~)SyI(U&ZR<rG\/)견<˸k!+A )fRA̢f6(d".!k:"{s-Nn A[f*`SٔJ迾|ك,!K 8\4V'-Gjs2uu?" 
M6?xQS92LʕyUF猰2!Xϋķ|^Mi^E]) 4wE4]Cc]Xׇu$pf(-qhcI`uQKXRh H5t؇4]5wp;S٧xwso;̷xn L.~V]^-7_.uڳNsёo)BxDᗔfYJ$jXFڑ+ywo ʌ D "FTaNZ.P)ch&'ݚHLJKsj?.='{AУ19FBrDHr5u6HMd^j*eSİ(}щV-cAӔ &o̗*;O@@ hZnpٽP[_J*u7)qOr©hHƄ\O-oFz26)%Y֙~(ނ3AhWWWpuن9Μ1Zh} 7M?˧ɇ|Ƭsb&()$83-nklp|E7eJ$P2PMqRz|2FٯԸxԆQ jqIhY[8u2AO[$n_)3_zA (w A'upS1O{ѮO$U'VfUX}fDќx72 (#n7DR&HRDp:p\JܙHAJeVQ$5Nۘ%yy`JsFP @x.KL[`)}.(@IhƢoZ׻A 3U*5b5 %`p9P҈bGYF;f)k2YĒsȂc6@"1ro[~(R jm<5g&իv Rῠ3]16iltPLJA&f^ED?1pG:, g5RKZ Vlxy}<u5?}m9[Yk눜k]O=VTZIƫ{tʂyDrrg۹)bp$G7WxָS>=L|A?Ia8#~Y|ѳ˚@w >a59[s9;'!ؘfyԌym xw~ڒihi1ܨ'FԇЖ@k;ѵ-F 40qs=4$BNJC;ͺZ{r|0PN7__5@do~#AA0zߩQ(GEÁ)iY*<-|QQȴTJsC[P@[`nQ`;;0 Hh\p<&FbhZU)S),L1q(&=P#.Σtsf3a!pBqPiدdq.rԒ q 愂طgqRsXR=,JK LN6b@L߭$h:BѺu`ޥCI\I"e?;T"=*``!_uS\VGlb [_ݾޭ|wv/-Z5\7oQiГܮR4~nDJAROwK$[^\yw?#LWX\[ `ZH 5Vpv3-qy}0֦ ].1YSvo-hw4|K9ijgu}g; 2᪾}>g1|n'^:f}XEKɛz cn*~;NXש/D$eG ꓮW_ӿkmeqy[zai=]*Eo,䕛hMVTj׽d ޭ)w;;TѼ[yLVB^ؔiOdͻ5LdSnN}oôdd[y2Bwa!Dlɇ $"R rLt>G7#mޭrۦRFݷ6ϸTdϬV}2*/}[4ISYYw?}uЯ+ vEqrl6!s^}@ڸBA<A5 bo|qtn2 8^p[O3m:P3k>DUs689p{4w E;/Vצ$ͻ`L&?Eq@=oSzT֓} [)@n̛|_US` &XE*pFLW&1gMԎ/o(A&Wh4dk9JӤI͵j[ *IR['AFN43 &NBkϸܯh tmо7Z <)muۼ[WDO7}}xn߽}K?9wZ)xHޝh%r>8tpG|3X~ߌ7xS za~ XH gZx `B{u"0b\Q8j9D΃8׼DK6DDoWN)P*SX8>*KlȱOH$@YmV(q]`> 8C{==FZ̩ޒU3s•2⸜LCL4F8#Rxb% RŬ`2iGM$װ,8kE<4_ `Lfk hq}y6%'!3D1BqI$Θ"HpaURE&%^#FE*.r`( ))9(_p.dݺIlr3N%`}<ܐCCsu5a xrΥJӋv \_0x&2M5xJA92zIF03&Qa?XL /&l-U9[\PHe ʙ/~zKDD_ri4?_ fvv],?o &\ a9 6d"T{;[n` t): 1C1ˁo@1R]6TY!"(ᇫ\d_\_rfeG&&G\aoHw[žLJzO G"ad}C?nU PEO5Y̧ uNH;;[' R>(0>N&=dkօDՓGAj NNO~B{壯 }!+KR:gfR+95 Ea>33A)D/bdK}!h!ꢡ}t^74 N*)$5͵LH}29i=5MO!'% ){nS,,vc{ r!_ SF2blbR%isA4Xθ2B$Ex `z~bi2msFHMEj_o{v~c}dfst4޾-X؏zIRr40YK-T6G#{-M﯈Z)9A܍uqL- MJZZiBfm 41Zhqˍ"AsZ>+IT_}R%2BP*&t+AMmCC{-B9|0]h83wͭ^F=|;~Iˉ!Cm{YعFUbRZsOo =HXP,,p LRciiW!z*57ule;+`D%!RiR];/}+iʪf'iǂ ?;dAT=CkI ?R~zB1+5 ~:P]!Tʠ j`Rr9߆P1|:P]!TٙYWI|?~k۶(sV3kUryYәxY+-REl#5$ _8JXN<5`c\Ϩ`1yt7ޖؗX^BST ShɘcBmX Ris~Yۛ ,S<1lDzIw ‹+^w P6:MNw:{giI6.G=C8 q y?UכVD1D];~ao_, tQYԬ]E*tkvD; ~ *qMtAels ['g/?MCϙXühTэN5Ea-3Ϭk@{ep’j?VFnp%% > sEB xLU 'f~> OU$aA0֭llvgYLvW1ځ=Y ~k[c,)7w~^;/Yj*6 5eW.Q ? 
&YY1rsN>@i~f)=KQٰ"aFKE4KgCV}-:;FvD xZ'Vkj6$䕋hLli7 [.uD'w&툞*IPKvkCB^fɔ"Ls11fv;b<[񥺶[ E4K~{'r 11FJPH\FI 4џoa%e)tF߯Օ@5=\~'DҏdOhu%h"OOiOV$ 9sDJG?aZ] Z~'=Yc J 6 JT1Ohu%h :V0 J\󡗞Z1 Qݯ:Ä8ifMpJַD0gY_MOi(ڻO,0Zu:w6ji؁4_ i?-&3f>|jyw"yXs7/8}?0m&᤯W6''I*Ɏ&YI,'Dr;%lVA0ܹQ=)SGV*.=dzYd0y(NѲ s-I+koW1*Ow'w;C,p-cA⑺7bTɡTa1m2_FwClI7< D$S_v,%z4w%?MZǟ`ƪ"xJۿ^bM*'M4{>WzkQb'L2**T"t[䆆Ϋ870b8 vmVFbz`!$VG4(6uك.RHi%Rg/OmuR+_0T҅U;vMQҏ~8=ެn-:鴖LHP;pRGVEZ|+/湍Z?m qCik3_){<#:݋뭁=-{U?&kj]I!/5C%k(hCbDZPX_=]~ A=Z/ä#xGiW6nLE(؞!:ӗ{/2d4U.lWo)xijv($h[HG+Mah밎vSm އ6m(&%][ y4xS5<ɒN$UW6L-Iy~iG9lEE4[} xyc'.3hLjɤNsX/rWJ'o-R1SB^0Iea3 : v&{xXu,zŔ\%Jېj"Zΐ|^/#:ݦTMH [>lQ3qo#U:w[wuըϵ]OF4={kս3p7qtJeJS覊IXzaqk~m]zm7 qqS[y4>毗cZA$LoOsa ZY0”, Hr5te@H|Txq3rĥJ$P-gJ gݭo1Y =:5g`X>k{˯uY^_{2~e9- au-޾dQgsxWвKillˀ%EpAAqB 7YhIX9í9իj(*Z9%4LG|.V&A1 yMNUi8nq,}$zmAxPx$G=vɍM\/!\Ddإ (9jNU=e.FNg&l#3Q= LPziYR9X)(&HQJA2HCq9ToʮMpʄK['Ȥl~vKqpEH9A["w&7y%ﯡ{~ۆr͒)-oli7@QAbPGtrhݎh^*AH 'ZW!!\D3dJ)ɼQ&Ҳݞ7 eV %dce-ƚI擄_388Mqch5E&tiB,#BtOgZ'"hN{F\s·v`İBVRq'%'[zg_뱵&MPb!NR D㙆.;PL^?*^ {8vH{m v-7K l0lZGx:;xO?d={\O7Is72&ϑ@-O^&_L^( ynuo9aRyj nkB*%I:xb &kkJ] r)U&u:H 0 >5 ETB ɐZl(S$ZKE%W1!a'=SCaq*JT!H 4F`#$NS4ӲT`R+X|y8@"oTxr ˪IŪI_JeeW|_zK"!H3Kryuw3_})(t " D?lb7ӛߞ~&]T?w|ժWܮ-Jr =Ojy΅A&1 7}qM5j%)֝.EGn` {ߍPDBMBL457 VAh6[H*a,Oeg1$D=z&U2w>Z(>2_<)\EO*z WMoV_',a=& X,9r.oRY9JQO~P9k跋e[ոݨ8bnV?.>p.{ )Q(vш LKa]|"#RKر*8p3!4JIpYR;R!(+B9_j$ „ bOu!Cb p%%חM)X\׸,+H[J obos]ҿt>p\}|m&~OV66dzl Kм {"\P%݉Ks9wľ$(z v`WWd̃O v)Q2 ߘǕTq8Zͪ* c?heJcW9J3&ŘDKyTUT1;a@ILD6@Xno)4~qX r/5\<.qRDY)QVTVv$]_?NvweMnH0eǻ[Cz4^;F;(JnvdKvL/PdبꠚCY d}Ho?aa1<)">F s\@#vMͪ>[iRy<-0%g1c z`la?9}t <.F"z"FmiB ܟR7Ah\J@cۧ:mB TnhV ų{]w ώXw јtn(?OqU.;v??&_xM:4qIaav314Zdހ>Oj&.320]ݵ+ߣ\5b!KL )(8#r^bJ-,2R  zF$m/5$z{HVyVaMHVp,p,0#ĂH+7ѱ䐴$v722+成ӚkPڿa*Jw]qV\ra0 :ל\_"ԛ/:\H-=n9c1! gp.7< nڈ;JSal@ꝢE -9Z _3 E0at`a.D1.`4yfwS()`c*?S̩m6t81\e1%䆗#V"()Q RD(sFP]59$KiVLHТ0"n'"WcQ¬ Jr(ƺ=C'H/ #ped|\WE5PLl7"eo?k[]߫ ׯ^ES_`f <>1Pv.grդ͕l_kڤ: pt a̦Xi/j=jq|6.?ڽԠ}6!st]uy-IMY kl?l$5V^A@898wl.0ec8F~d4b;Tcb?n,@"UgmŔT0'tʅ}d\cGS;]ecړS) Q`UH9w6 dگnobhW@V(.u,I4y܏įj4@zT=CdB@z_[&x{EWC"c-k4 Ry>P% O4qV+[?3m@^\f?~R/du)ܙc\7R-KoHOIݟarUVH3:3 2V^5QƔDeh-#W8 ($J](;)bQ9 z)wh-\`i*AGˎi =~X0 qa}"+fH3%7<1cC+s7"S'zZY>69jL)]R==;Fl@Gr{]{*0MR=l|8S-_Ap1fȅq4["`;N@C±4G~1L~w# (sKwd@$#0INaՎ=ܻJѷ"92%SSG<~Af|`i HuQ)H6{ :" W!b>.bSiYmhA/!S)s1_gD3'j1[M,3gr^ёY?u_ e>B~ȋ#=~Gh*AЍ_¸C.NlVG kuʖ'G)M 7ݥ}Y;3l'Fz1:Lqw | K}p6ŤBX?%5o=AbBN*]{Rdl;27jسRr3GklJŒB9Edax=I, _rg##_j&s2Z˿@E)<HA)KV" ˹fA)]܄(RJDsQ+, D8GۈJ/=W )Fv?Z_t9b0աAÎ)<+Acyc&ԅ;W[9?֤\Nrwd=Iw]oٚ]RSw$V3QR_v!3BRu4:i/5G="銧9ce<^+oJO\Z[Q^97W{fu+O΅L ᦘos4?_ޯwk>֒z/xq_Y,RJ6+lUza;1Q~xʏ[nxYSzn/RX 76%U49%3r( 5#_ E rhe+㽜-֢.(] w?5 ;їX;6)ɪfeEU2Ջc=OcbHXjýd԰^Vfǀ&8x>RR^YA3 ?Uo)9b,*p$ 6JɧH.AOgN= VT穞L26dǦdj?m8 ]U}W'tysYn+SZ{Y?Ι37:gntѹiNawk )1\(u!0ȚFqAҘQ@?|~ ~=y^ULk$דM|=t?6q'.;ooRz> G@>OY|F8AD ZbjZ`sj,hnHRRcia49ť&0Tʡ@0 O\:Bک!-//X0Dy ,#V.B,Xɑqip7. !Ł4BQvVhC; n&usQ+DE7t1Jqڝr֫h_͏vvb,S|=˹^O !; wQ#x}^[*ҏ*7֯XǏ[LշՇ3%PUe2dYVhLKd8å9(_6,nV1 w-?_ԢW]B>ZpN_{&v*yp+vY[h6Wӿ]~0LWu]G,EW@1^ h %D95}| jydsՐB9/X |qSfu <}_zУ D/($,AM \E)ץC2e4 ̅>g.9sϙ3VadX!!P 1хF`6 ЂȒ)^JQT~l Is61ݤِpxHl,IpHB:T, N?xfnvif?;0.#@`P &0"Z%P1b; c4%2i;˝XH aa YR2nR,qTJ3s&,5- No,˥f(wFdL|Ͱzn\xd|&2&_=5\s~7_z|QQ6Ar8'尙+Y/hGs 7jүOWW?%C%ނ5EƆ5Um;s,;!iI텴@`@ddRRQF%M1NqAZ7k786C #_;KP6؞Pb.o+*/,0AnYajx!ՀTsVn NBwPR 5aKCXQx~* \JR)XGk)+yJ 94fMFK61C_6'& ?ֵ-(?Y1Fܹf zU]#քn!4IhV 12$ᝉ5ko`fM63W+ҌQ$%1FZ % #h(@p8jeF.xOl7w3mo$Q]QLsE E! 
KH+Id6{ItQ\Y>ǛJX y|\2AX%"#d>OHFI27wn[+'>ѸY\N9'*>2SzF_88#54DncėJ@3c0cd+cn?|"i˦rUS&Q8t 1Zaϊv0`gW[;]PPrvL3IU脭A5|Fc8y>Fvn/|닇ݏ-FEr㴧}co|M5F8iLpS@&/w-Y=u63}eW3}z?O28B2ݕLJP8V;J^P gN0vd8j(_9dJBB Y( ibx`Yn E:0L0 %Uɾ뤜C'i< Vtdp6YbɐT)RQ js!l&VdkQQ}Q}}i3U@IѩHуs> jbu>*14s!/4ARę]W87s!ƸO[!Ol#ݾ99ìd%\Y.M 2e9B=;? # P[ğO)!)v?|zmӇ,J U ]![C6H%4jCqoC)t; 9n{xfӼoCFv_K1߶ɰ dܔa[!F=G{N6 ypܡ7tУ N豢{pEݽkz2ڪ(Q돣]st}wpTpeO%EbwbjǺkEpKT}5v:r/0)Ү İ3&£&\L;݄S(<,„SYzIԐXpǦ{gf!}sc譮G7̬wZCC,dљt5:DTwA˖fiD{T<%뻧O]̸_=]ܰ,Yqo{p LjUD]t\mX6pWa\~*Mcg7|8=uy$Gw_3`&bb^߅? Ǻ pװJPJ4Z(P$խP&0P$CT h5^eUcd1B 3$WԸOр/vJ`F5o}GHj5QÖJ0>7'M+#UaB$e Va<[;%&` A'Uߋ}S!(Um2ޕB>fNn I-O)IL(3IBugKGJڮ:o ?2Wȱj F ,Efr[}W yfs6N`k,r;s僘3aE?vq`]CBK ~VΠNRpf@ y%#I=FXc!k++qiT<D3ܣ}_/sq)"(.w 3P<@ {%`HM+_ 'džws! 3Bjٕz=h{טCo#/P =z@ǧu(ʓRȃYa7\| (٢@$0o[x%CWuTFON'#)UCb!aK1“÷o.G- ft'3QJ>Cع <ճ9!w8퓊#rM7b02 \ SVb!l@ e/ 3~g!M`ؾs[ D 5we&+j͌vr>z|/n{77|;ivyOvyw% >,q)VwVߦѴH~ `B& Qp&~嬕.z{;k60n묅2 'hO@m=@I=\UjØcuև WԳR7[)} ϝTuvPe?t?&qRoԛ: _!ڇŻ۔]"U2)#Rܺ)I=TJVWDFpN%uOTD+ǑXM!C2`cߩrA`z*w) x"t4LRmm) hK]iJUFM 2(E%)U>J&RN삡a[1"6ej(ۋiT5 \j@q_5TT%+nV;*m6ƣl QT;yU857EqJ)Z=WW} }m4~^.ndǐ˔)IFI{kʒI _{DI Ý.7C 9o?R%u{dqh;^.@7"VL[^Fʶ8,0(*Ր %EZqmfhp9:1RP 4w1v\&*eY^!@UTR_Ɍq2f VvX P*m|"5hKR2HJTT^ùpBqRB_‡%4J"@D|yM%[ʨbi/X)ce OzV J@nP^meS5V l;բS-sQjZ"M"}jO?JۺYV8IG-ǷzV^._} YD0=X7QP>\>y#<JzzuugҵaJ[\5t.IL>OI~vu:A;$Eɣe=Sd#2)l7"m^)g]X'GX'}LAgBo?5•gT>5L_aLaJhqwכtJ@ /\-Zhk5sF,bAؓ)"ky<%3/YĨ*7vc͉鹊*F7ݞa:.P)THDl|!mI]iNmIuTp4%p+#BtiA;HʇP T9c0CgvM)Fr".ܧ&7RFL3Of7t?BEciKS} ɔ˺e K,d7-J(u{OQJX@n}L}5~$9 E FJ 6Ҕ{N$l[Gοow֬gv|*Qgx:SM8sYRKJL VDVL, ܘHw":rݸ~` P4BRQN"SE~;t`fgcJ7pl*3 5^ǚ;JsKQ  JǠD_RlTn J(9zՎZɐ虳8| ue+pENm5mpn׭!P端j8o)*eZXbmEc,, 6MA} y(*T(5R8̣x$Zϼ97hQ# abVaZf>J4ݫeU E{,V18*H J@9t|!ovvUb?I)I^ iٔ$]MI~u"ixv:6=GM]f5:|yg7Ϊ]vKVJhIzo5Lܵ|rέE54K1&Kq2_ w>29 J>/t_V9jVqZty+T8gg5~Dc.Ez\%1_@)kޔqΓ dLEhj JM>Jcl .'ԃErz EEII,i43A4p4d=c I`D iZ*8Zv@p6N:lWE( }"N"$ qm+ejgcKKk P0ʂFrB*-)]2UDO 4*4uHqV 7leʦŹ6mmZKڴ~kc >%-:jς2B-RKރ9cQbl̲ҒlejGã%`@Nsͩ+7'd'#'T-3oMӒFm8-V}b*PEG(tɫ^i_rE7'1{^y>p>Q1>A1az>nu3®go~US S8@V j<.T [HrEj:PMYuIŷD,Ĕ)a#+POqk%?`IM^2ˎV9g@}M [z^ku-._a&9ŕmNoAl5ϻ߯kt# {Ӣ[!oLN̓2ǤM_fL.'s뺱,jJ2s P7#]$:5,Kvһ\CT +7l+k9ǸK#B;@9PXeEcܧ.qgH,FR.cC񦔞1kaQ~~u~'Q;إ`z9H'Җ7 Uu2y(Z嬧C~p^yE!a*{C+ U褫9)gyAڑtIR 0X" ǬчxC»B\3^@ >R5V9k u~VO[/͟`hzn Y(OYnu8W`qPRA*PRL@#:2&uYfgWFFh뀑{IeZRlT:dN,""JQJCؐFYΔ. Ք6:O|Rk ZbYU_E>W4k\i6XYK^QMWI~R/nY #\9zsy|I T?sm5b]cCD P|Vܢf[V8=Ѵ8K\g>eYj)Dž*Aы?Fv5U:cI֖@ :09TU56/ŏ^QN5ىhӡjo%Y#2""U>p%KC\)tzae0"ZS"3.K& '4wQb4[HInՙ#]iE Ht ^,$٧x'Dq}%$1w,8镊@ixSUS\.pE#D8jDA 1 Z2O>*ePEiߚ)MUվ* ڸ3~*КX4WJ2e@[\(55.}YL ͠}Zhcwmek↠g,K4 %UK8.‎&bK F &" ܳ*p߼!ߐgC/v~ӳTDy>smba$.=ۻa{iJHHI;AJ"{VL?j HdOy`xbrZan|L<_^]^ioU f]Xvfw7\K+I:(Ȭ&*#KK)? =.dFH?XD~'ug & W Q"~Px%$~bW.7! L0NH!\*YpIN}@릉Hgōi YFFZe -ڠ,gGL`hX|/j31(_J& H<3IsKV\/b)+ a!xXAm &Ȓz(GZ L-ILd ,C$CzZ[=}C̩~$h1_͗y]o4g(YIijkr  Hb E$s/CotVJ>%tLV(GC\ʶ7)pp`92Nn.u,{X|w(jQWvJQA,4 s#LћQq5۷Gkt akEQ0働 &OX2tXq'sE/ǂ榽hF\$YkRw9edŠƺ}9^M.mUJͷz38ɋ$EIRȼ$_'rzcCQ_@5\گnb1$~E2pŴIP*&cq vDLMh& _DrNA#b{Ca`@P9%dɄBpɑVFApq>@!USHm-AD\܂d7O s5Ap}1 ̹;F\lI' HYЂtMщ`BANoh M`"` ƥP rK2 N.Ϟ۳8rZcp0!yPѓ%bA(8[ňYgG¸gq&:5tq@[wR[(Xt,n✋y<;7Վ;M\@0dTQ4{ݶ/>By!MŮ󺽊L*'{E5R'HT&KJ+&ֳ[GF0 nݒ1|#B2(AsmT,!͞U4ݱ6Cou;#Œ4s{ s /d 9s1ML=KXHdcPr< cgo`l@3hYH|%WC?]!F~u=4'ҧ5@EE2V!xI gj-e4 #S}~4N$yYd?4W,$ǝjrFdjVndcPVȎ9ih,z j݄8rz?,Kr>1ANIGǢU\}GG\ fyA1Yd&dM>YrՔ3dc/Z6iD)+KuUsDKX2}rc)-RƂ*IQZϩ4ATL"M)dd)/=JM\PҋJ\ "HRh8k-΂J;BI.,^eIě(",3ˤ+:l+(IEߴhtC1Y@ X"w"P8sR3tAJ> au*{UMsČ1hw4*ui[qwѫQĪ<(hi&ED2j}`k_'X@*81H␢(\u>Du#C|+_ϟ%s[!z'Ju`7"CL)i1Q_V_ۥ?sfB|%ɖ(gi fG_i*^;5{j.7Ԋ p޸ȗO''tXiK1XC}+@.B M!64 #!X7>b=qK^9ԖXmzQo\^)B#1CObzWlkФ΋cdPzbĻ5gE^+U,on! 
&>tws̜9&@lpF3V&$9}Ѳ/6r:Q-Wle ԗ+)ȜאI4*J&KV1 5Zv4:f}εá'Q(T 7#4 duL}I#OQ-H]&29nwX̥ Ȳ-3Dc+؈sq1z}b2ՠ{&B .F7&J`Uo\Nܽh(ޮf4I|&Ikek=D:som19>NYmGX2|>â8-iKe4GlhGw|XkIO-C}[z~$Q%U#x0Kx|A\`RHNۦu Q+%kܤ(y4l,hrg_5PjCևp}B{T &*4iT]|U,TcU^ݰ}8t7>Z/|O{#WW=C9x]ݐW0"9mw2+$2-`\\r>g?Υ5R%~X;a" {)ĄP =tf,kNџL,o}$sxNhN5om6pftWK(Z!H.mɿg~GvBܚ+"t͚wF@hj> ~ ?F8+om~wYf yϞ@^o Ud}pT|NCG"uBV(0@Aٓ wh[w\ }.u77,V5)X7 5%h2aiJ`=ۡX-(04Al8i(hтգ2J{#i 0+hn'v_d۫6"m|,( s79_\KV=f2M^ ;CW௞>ara@ 9ȃ=#UW{b'aveT_H579&iGsn_,}.AÿoW~~G|lY4oBv75ϣ2j8+qt._|4x2T鬁~|?aA3 'OG YU8!7Z#ihsQxRܢK;j1jpuG# T>6 xjiC`uO:C??q !\ZGL@eM}(.+qAi)w vO{{Z鞠A~CN*c,1k1j@R*ICI~%rB2B8T8*U]tTޙr yIS 2zEڗl *#*OUך}iGxâYigRgtV. \&z3giYr,V2#2x5,42dhjDHP:jF^?'6$J` BP[ M³Id'Y{-u͜=|^,y4=Mm+Ffl}3ȉſN`~z3+]fC~\|9%8W.ƛ%(ٗ^u Y( )#P\&^|Iš@sRF߼{[@w_*ʣYYk )rzA!6$}6*$lr`Ne p{zQʹk }`P{ -t]ko MxXZĥ`IhuUAYJa+_giH8vφRm?u=-tRflwL(oC;I]#A끯oG·{F>JW]/Y:9A+/hWe;W,Q@ dć-͎X^)9Nj H[Yk|gc6UkBo%Y[Ix@  ׃q 8ă!$;xgNQK! vvp6TE"Q tN8p*WaKeG뽏=P=0ǡ8]ZooE`lꢟG4`iw缜d{|~8x-,&CZ? +5<|wn[$ 1\ i*9R {Ey%rU+3P]  5b)uS^5dI;e|>⩧`Z$IS)`k=rF)L?x;2/IW{+܇ĆLWFHLEŚ(cn%<`0*]F/xQ, C+2Im+:ۿ}ܶg w&QbQNd#0UV=F4)m#X][3RphR  1Wr 2J kc(yD;L'.,R zAHYfMPQLS//:Xd6RŜZm`e07%le*AR s_&?J+`PrczߤAY2dzHAޮ pYO{[|ZkBLh UNY*5k%Tp*Ơzw/XYzBm41x=+ƑLp=1f xܡ\t jގQ:ػ9Q(2w#sRJʙnɇG+ ~.]oSf E^!EuMYQk&/cok2LH2ҿ=R\,g;;[Tr Xs~턗w Jc!sD=֋:T?G$$亸]zyV--1B Fro4mtJȳ7F4>8ǣY1r \@ ]2Y]8'|fL#:"CMޝ()rR]\M浰^QYqRn5B_qF[Yr>c)},6k35-GETn-XVl1jD< '!gWUh@x?Y|ORx{o/_5Hy4ZdmdХLjxҖ1(eHIni}80raO6:wURx`QBlT[ysT:hc_cfO*vt8|#pؿ֕j2^^lj5/dQ<1{'uہч˼w\d'F~?~{Z4kzn`֑yт\huq>eCkM:/Oxd%ʬBvm?JTxh1zwv^xʚK] :9X6Wߛvx|IA[˿|3o61yH}dcFyqi+lk1ב:fNPs:R &1>4T#Q\0!YpYSi?B-a:{>n!<(kxXk.c_rti5ژJ-HwMG76rCM[<‘F= |PXDo0;qt|; æ0T?}Ӽh\hi^4Ƕ-6(ذc8tld3x^CƙlbDi7~97%x@OF8p/8H(纼N}x5rb[紟N}<\cT~8RWK͡__/W7 q-`MrF&iR..ٰtЙS9`R)B"[a=TPl 4L$05u87hPƶXqVlRS T *mA0~+.V4'b]x4+tOT"i.ɼIH뛄hkh+D[ZY ް"^]RHL׏D;tzR(FZgmkTƿLh=l Jg_Z7i7nO_eT)gM  p0*VsP`U X~ֶЅ:ڝ@r!JB* x9 FݒҺ7jU5B !z`"!?>(¦φ E}+y]2&LZtbٟ\k< Og L[V&(Eh$ЩM/&cF {D"{-+! |RvF³O.'4ijWE,Pʅ&)J+EKNa*sȽXi^qmL@ފo6 P c-g[$6k̩jE) 8MUTJA2L*IpQ3HtȅLEnwf%y~j;zz4|_ ZPBߟ>([!Oc_>G6wtYL=z dye:?A=4t,Ô"'k;gSc)oYMNa$Td778V&lxYT 8ޝ]w렌w-01JսQ}jQkj5ۜO8.p0oH*y/́J g$FOr" ;j ^^pxbxn۹ڀgbvv2DW; hy|qsAH(<|+ dV &I.w$ןŁ ]4b# =k5M: ceWn8wWˑ}e05W{-׎WioWbI5gЙGr{=Y({.ԘT˲'Y`97vCw9^!ka|o=efQԢ~lĪ_BjWV|~/k#-Fkh7T\>}p)n1PC6ՌSߚ).u|bԹj%s͜A{eཙppeo a8zCaC,ص/U>hBhh 3!B}]:W% VTҧP>hH9kwRtgԜJ7[ĸK}@ }/Fi:WWz,gJ Q.Ґo\ET3^yṵ`Duu;2̺O+nCh7t=Z ffcX<(Qhcݎ=onuBCq]ө¤a1V66.ɬNﯣڳMyp(Lj^xrW//>pz;RBk0R>ۜP{Q'(טirDdD)st Mׯ*эEy;Fs~2a>I0ϥ4&b~v8>\NO)1;>]tf Wz4OJ{+D9i1˦j*7 ug2#ȱ}dyʏ5yMm6Fg' زeωQ5% 1HA*FWRQiвFgxDF֑W`|iP2'SYBYZᛍ8+|·cin|W^B3覫nλ(: 88dp%ʑ@Õɓ\v聚khtjZhNN,Uj Q`H@σmi:YJ4KRzDO&z ɇ&c3cʢ/5zWLwOR͇w)¨pRhLF 5:vJI|R:?7]g{]ݤ=iNrcw0BJCAO?G{6_Y i=O5;0 "э5~Bi4E:P)_XyN{ @4OzwR/3 ]ω:'۱#U\N`N&!' HVawsF͠ 7Z>!G<#k QIV9c RNW;pn6nk} Dsz^eMF1{n]6d#VtB4_b8S ?M.ɒR;.`C28Kmє!ĕMRP0uBDɅDZ g1%̨aPqcmۉ&7r 1Y OdDl2(vhN$V/;Fc c2Y7vx {VKiT-j/$1"ֶk46(D >L$I?ڝ?]~hG*a:;.]>2T+[ V'Ņ$iH?`@jL?2|uSF34u٧[zهU|2 t?m-S;̖_CJ9zr#?i$>ٽe=pl"gxF/*?>A  7(V>0@|9#c Z^?fs Y+Bt؁}0k rG6A_郑f[>k˵{cPs$|-u{y}8ʻ^u1w51&9ђ"!k* *@Z.NztDݛ7PsAIߊn ՚+j@ .`  r@@A,QDI" x=dW0Zb5DaDҼQZ*!u昼yKFx-7{ 324K@*-%M^T/_0z7h2D]Qdŭ̢M&D j_2 08SCrR$P**Ἵ{K&᪜vgbqӫegEV9]i+*`;T̃N6hQ1?Q) ˗5ixu<=s7u}[̯o9-'EP WyDV7>:_ZM9[Gs1;0\HBT);Iydzd]*$^@G 7 /=FdpR8 b <F*Jؚ) |- .MU*P@??EKF5EϤ:0N\$JGkA^)ax17C}(7<Q3FXrTP8 `za`}M% I6r Ҥ਱X)g$fnv,Z xͫhhPi}""ν\'/ރy/cmQ?E+/7:>{\&jv>˹_2yNPkףlQ'ӭ~q@n`e o? 
var/home/core/zuul-output/logs/kubelet.log0000644000000000000000004247210615157251151017706 0ustar rootrootMar 20 13:22:32 crc systemd[1]: Starting Kubernetes Kubelet...
Mar 20 13:22:32 crc restorecon[4674]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 
13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc 
restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 
Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 
20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 
crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 
crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:32 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 
13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Mar 20 13:22:33 crc 
restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 
13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 
13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc 
restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Mar 20 13:22:33 crc restorecon[4674]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Mar 20 13:22:34 crc kubenswrapper[4690]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Mar 20 13:22:34 crc kubenswrapper[4690]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Mar 20 13:22:34 crc kubenswrapper[4690]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Mar 20 13:22:34 crc kubenswrapper[4690]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
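The deprecation notices just above, together with the two that follow for --pod-infra-container-image and --system-reserved, all point at the same remedy: set these parameters in the file named by --config (logged further down as /etc/kubernetes/kubelet.conf). As a hedged sketch only, not this node's actual config file, the flags being warned about could be carried in a KubeletConfiguration such as the following, with values mirroring the flag values printed later in this startup dump:

    # Hypothetical illustration; field names are from the
    # kubelet.config.k8s.io/v1beta1 KubeletConfiguration API.
    apiVersion: kubelet.config.k8s.io/v1beta1
    kind: KubeletConfiguration
    containerRuntimeEndpoint: /var/run/crio/crio.sock             # replaces --container-runtime-endpoint
    volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec  # replaces --volume-plugin-dir
    registerWithTaints:                                           # replaces --register-with-taints
      - key: node-role.kubernetes.io/master
        effect: NoSchedule
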
Mar 20 13:22:34 crc kubenswrapper[4690]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Mar 20 13:22:34 crc kubenswrapper[4690]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.142675 4690 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148290 4690 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148328 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148339 4690 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148351 4690 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148364 4690 feature_gate.go:330] unrecognized feature gate: PlatformOperators Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148372 4690 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148381 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148388 4690 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148397 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148405 4690 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148413 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148421 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148428 4690 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148436 4690 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148443 4690 feature_gate.go:330] unrecognized feature gate: OVNObservability Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148451 4690 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148459 4690 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148467 4690 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148475 4690 feature_gate.go:330] unrecognized feature gate: GatewayAPI Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148482 4690 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148490 4690 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Mar 20 13:22:34 
crc kubenswrapper[4690]: W0320 13:22:34.148497 4690 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148508 4690 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148518 4690 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148527 4690 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148535 4690 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148551 4690 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148559 4690 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148569 4690 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148580 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148588 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148597 4690 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148606 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148885 4690 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148896 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148903 4690 feature_gate.go:330] unrecognized feature gate: NewOLM Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148912 4690 feature_gate.go:330] unrecognized feature gate: PinnedImages Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148920 4690 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148928 4690 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148937 4690 feature_gate.go:330] unrecognized feature gate: SignatureStores Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148944 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148952 4690 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148960 4690 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148968 4690 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148976 4690 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148984 4690 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 
13:22:34.148992 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.148999 4690 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149007 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149014 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149022 4690 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149037 4690 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149048 4690 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149060 4690 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149070 4690 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149080 4690 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149090 4690 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149101 4690 feature_gate.go:330] unrecognized feature gate: Example Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149111 4690 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149119 4690 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149127 4690 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149135 4690 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149143 4690 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149150 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149160 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149171 4690 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
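Most of the "unrecognized feature gate" warnings in this run appear to be OpenShift cluster-level gates that the upstream kubelet has no definition for, so they are ignored; only gates the kubelet does recognize take effect, such as the GA gates it reports setting above (ValidatingAdmissionPolicy, CloudDualStackNodeIPs, DisableKubeletCloudCredentialProviders) and the deprecated KMSv1 gate. As an assumed illustration only, such gates would sit in the same KubeletConfiguration file under featureGates:

    # Hypothetical sketch; gate names taken from the entries this log reports setting.
    featureGates:
      ValidatingAdmissionPolicy: true
      CloudDualStackNodeIPs: true
      DisableKubeletCloudCredentialProviders: true
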
Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149182 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149192 4690 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149200 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149208 4690 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.149216 4690 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149601 4690 flags.go:64] FLAG: --address="0.0.0.0" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149620 4690 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149637 4690 flags.go:64] FLAG: --anonymous-auth="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149651 4690 flags.go:64] FLAG: --application-metrics-count-limit="100" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149665 4690 flags.go:64] FLAG: --authentication-token-webhook="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149676 4690 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149691 4690 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149715 4690 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149726 4690 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149738 4690 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149751 4690 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149760 4690 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149770 4690 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149780 4690 flags.go:64] FLAG: --cgroup-root="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149788 4690 flags.go:64] FLAG: --cgroups-per-qos="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149798 4690 flags.go:64] FLAG: --client-ca-file="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149807 4690 flags.go:64] FLAG: --cloud-config="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149816 4690 flags.go:64] FLAG: --cloud-provider="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149827 4690 flags.go:64] FLAG: --cluster-dns="[]" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149838 4690 flags.go:64] FLAG: --cluster-domain="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149874 4690 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149884 4690 flags.go:64] FLAG: --config-dir="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149893 4690 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149903 4690 flags.go:64] FLAG: --container-log-max-files="5" 
Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149914 4690 flags.go:64] FLAG: --container-log-max-size="10Mi" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149923 4690 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149932 4690 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149942 4690 flags.go:64] FLAG: --containerd-namespace="k8s.io" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149951 4690 flags.go:64] FLAG: --contention-profiling="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149960 4690 flags.go:64] FLAG: --cpu-cfs-quota="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149969 4690 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149978 4690 flags.go:64] FLAG: --cpu-manager-policy="none" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149987 4690 flags.go:64] FLAG: --cpu-manager-policy-options="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.149998 4690 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150007 4690 flags.go:64] FLAG: --enable-controller-attach-detach="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150016 4690 flags.go:64] FLAG: --enable-debugging-handlers="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150025 4690 flags.go:64] FLAG: --enable-load-reader="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150034 4690 flags.go:64] FLAG: --enable-server="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150044 4690 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150058 4690 flags.go:64] FLAG: --event-burst="100" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150069 4690 flags.go:64] FLAG: --event-qps="50" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150081 4690 flags.go:64] FLAG: --event-storage-age-limit="default=0" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150092 4690 flags.go:64] FLAG: --event-storage-event-limit="default=0" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150106 4690 flags.go:64] FLAG: --eviction-hard="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150117 4690 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150126 4690 flags.go:64] FLAG: --eviction-minimum-reclaim="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150135 4690 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150145 4690 flags.go:64] FLAG: --eviction-soft="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150154 4690 flags.go:64] FLAG: --eviction-soft-grace-period="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150164 4690 flags.go:64] FLAG: --exit-on-lock-contention="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150173 4690 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150182 4690 flags.go:64] FLAG: --experimental-mounter-path="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150191 4690 flags.go:64] FLAG: --fail-cgroupv1="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150200 4690 flags.go:64] 
FLAG: --fail-swap-on="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150209 4690 flags.go:64] FLAG: --feature-gates="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150220 4690 flags.go:64] FLAG: --file-check-frequency="20s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150229 4690 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150238 4690 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150247 4690 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150257 4690 flags.go:64] FLAG: --healthz-port="10248" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150266 4690 flags.go:64] FLAG: --help="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150275 4690 flags.go:64] FLAG: --hostname-override="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150284 4690 flags.go:64] FLAG: --housekeeping-interval="10s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150293 4690 flags.go:64] FLAG: --http-check-frequency="20s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150302 4690 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150311 4690 flags.go:64] FLAG: --image-credential-provider-config="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150320 4690 flags.go:64] FLAG: --image-gc-high-threshold="85" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150329 4690 flags.go:64] FLAG: --image-gc-low-threshold="80" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150337 4690 flags.go:64] FLAG: --image-service-endpoint="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150346 4690 flags.go:64] FLAG: --kernel-memcg-notification="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150355 4690 flags.go:64] FLAG: --kube-api-burst="100" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150364 4690 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150374 4690 flags.go:64] FLAG: --kube-api-qps="50" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150382 4690 flags.go:64] FLAG: --kube-reserved="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150391 4690 flags.go:64] FLAG: --kube-reserved-cgroup="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150400 4690 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150409 4690 flags.go:64] FLAG: --kubelet-cgroups="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150417 4690 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150426 4690 flags.go:64] FLAG: --lock-file="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150436 4690 flags.go:64] FLAG: --log-cadvisor-usage="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150445 4690 flags.go:64] FLAG: --log-flush-frequency="5s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150455 4690 flags.go:64] FLAG: --log-json-info-buffer-size="0" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150469 4690 flags.go:64] FLAG: --log-json-split-stream="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150477 4690 flags.go:64] FLAG: --log-text-info-buffer-size="0" Mar 20 
13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150486 4690 flags.go:64] FLAG: --log-text-split-stream="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150495 4690 flags.go:64] FLAG: --logging-format="text" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150504 4690 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150514 4690 flags.go:64] FLAG: --make-iptables-util-chains="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150522 4690 flags.go:64] FLAG: --manifest-url="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150531 4690 flags.go:64] FLAG: --manifest-url-header="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150544 4690 flags.go:64] FLAG: --max-housekeeping-interval="15s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150553 4690 flags.go:64] FLAG: --max-open-files="1000000" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150563 4690 flags.go:64] FLAG: --max-pods="110" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150572 4690 flags.go:64] FLAG: --maximum-dead-containers="-1" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150581 4690 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150590 4690 flags.go:64] FLAG: --memory-manager-policy="None" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150599 4690 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150608 4690 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150617 4690 flags.go:64] FLAG: --node-ip="192.168.126.11" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150626 4690 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150646 4690 flags.go:64] FLAG: --node-status-max-images="50" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150655 4690 flags.go:64] FLAG: --node-status-update-frequency="10s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150664 4690 flags.go:64] FLAG: --oom-score-adj="-999" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150673 4690 flags.go:64] FLAG: --pod-cidr="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150682 4690 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150703 4690 flags.go:64] FLAG: --pod-manifest-path="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150711 4690 flags.go:64] FLAG: --pod-max-pids="-1" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150720 4690 flags.go:64] FLAG: --pods-per-core="0" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150729 4690 flags.go:64] FLAG: --port="10250" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150739 4690 flags.go:64] FLAG: --protect-kernel-defaults="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150749 4690 flags.go:64] FLAG: --provider-id="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150758 4690 flags.go:64] FLAG: --qos-reserved="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150767 4690 flags.go:64] FLAG: --read-only-port="10255" Mar 20 
13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150782 4690 flags.go:64] FLAG: --register-node="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150792 4690 flags.go:64] FLAG: --register-schedulable="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150802 4690 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150817 4690 flags.go:64] FLAG: --registry-burst="10" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150826 4690 flags.go:64] FLAG: --registry-qps="5" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150835 4690 flags.go:64] FLAG: --reserved-cpus="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150891 4690 flags.go:64] FLAG: --reserved-memory="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150904 4690 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150914 4690 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150924 4690 flags.go:64] FLAG: --rotate-certificates="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150933 4690 flags.go:64] FLAG: --rotate-server-certificates="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150942 4690 flags.go:64] FLAG: --runonce="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150951 4690 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150960 4690 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150970 4690 flags.go:64] FLAG: --seccomp-default="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150978 4690 flags.go:64] FLAG: --serialize-image-pulls="true" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150987 4690 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.150997 4690 flags.go:64] FLAG: --storage-driver-db="cadvisor" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151006 4690 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151016 4690 flags.go:64] FLAG: --storage-driver-password="root" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151025 4690 flags.go:64] FLAG: --storage-driver-secure="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151033 4690 flags.go:64] FLAG: --storage-driver-table="stats" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151044 4690 flags.go:64] FLAG: --storage-driver-user="root" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151055 4690 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151067 4690 flags.go:64] FLAG: --sync-frequency="1m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151079 4690 flags.go:64] FLAG: --system-cgroups="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151089 4690 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151106 4690 flags.go:64] FLAG: --system-reserved-cgroup="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151115 4690 flags.go:64] FLAG: --tls-cert-file="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151124 4690 flags.go:64] FLAG: --tls-cipher-suites="[]" 
Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151136 4690 flags.go:64] FLAG: --tls-min-version="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151145 4690 flags.go:64] FLAG: --tls-private-key-file="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151157 4690 flags.go:64] FLAG: --topology-manager-policy="none" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151168 4690 flags.go:64] FLAG: --topology-manager-policy-options="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151177 4690 flags.go:64] FLAG: --topology-manager-scope="container" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151186 4690 flags.go:64] FLAG: --v="2" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151198 4690 flags.go:64] FLAG: --version="false" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151209 4690 flags.go:64] FLAG: --vmodule="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151221 4690 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.151230 4690 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152380 4690 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152402 4690 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152415 4690 feature_gate.go:330] unrecognized feature gate: NewOLM Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152426 4690 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152440 4690 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152450 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152460 4690 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152469 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152479 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152489 4690 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152500 4690 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152510 4690 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152522 4690 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152534 4690 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152545 4690 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152556 4690 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152567 4690 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152578 
4690 feature_gate.go:330] unrecognized feature gate: InsightsConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152589 4690 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152598 4690 feature_gate.go:330] unrecognized feature gate: OVNObservability Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152614 4690 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152626 4690 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152638 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152650 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152667 4690 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152677 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152690 4690 feature_gate.go:330] unrecognized feature gate: PinnedImages Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152700 4690 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152713 4690 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152725 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152739 4690 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152752 4690 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152762 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152772 4690 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152784 4690 feature_gate.go:330] unrecognized feature gate: GatewayAPI Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152794 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152806 4690 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152815 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152824 4690 feature_gate.go:330] unrecognized feature gate: SignatureStores Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152834 4690 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152874 4690 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152884 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152893 4690 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152902 4690 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152911 4690 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152920 4690 feature_gate.go:330] unrecognized feature gate: Example Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152930 4690 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152939 4690 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152949 4690 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152958 4690 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152967 4690 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152977 4690 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152987 4690 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.152996 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153005 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153015 4690 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153029 4690 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy 
Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153039 4690 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153049 4690 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153059 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153068 4690 feature_gate.go:330] unrecognized feature gate: PlatformOperators Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153078 4690 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153088 4690 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153097 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153106 4690 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153115 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153126 4690 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153139 4690 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153150 4690 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153161 4690 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.153176 4690 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.153204 4690 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.165954 4690 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.165997 4690 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166095 4690 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166106 4690 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166113 4690 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166119 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166125 4690 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166131 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166138 4690 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166147 4690 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166154 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166159 4690 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166165 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166170 4690 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166176 4690 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166182 4690 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166187 4690 feature_gate.go:330] unrecognized feature gate: NewOLM Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166193 4690 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166199 4690 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166204 4690 feature_gate.go:330] unrecognized feature gate: OVNObservability Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166209 4690 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166214 4690 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166219 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166224 4690 feature_gate.go:330] unrecognized feature gate: PinnedImages Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166229 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166234 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166239 4690 feature_gate.go:330] unrecognized feature gate: SignatureStores Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166245 4690 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166251 4690 feature_gate.go:330] 
unrecognized feature gate: AutomatedEtcdBackup Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166256 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166261 4690 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166266 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166272 4690 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166277 4690 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166282 4690 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166289 4690 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166304 4690 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166310 4690 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166315 4690 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166322 4690 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166329 4690 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166334 4690 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166340 4690 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166346 4690 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166351 4690 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166356 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166361 4690 feature_gate.go:330] unrecognized feature gate: Example Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166366 4690 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166371 4690 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166376 4690 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166381 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166386 4690 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166391 4690 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166397 4690 
feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166402 4690 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166407 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166412 4690 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166419 4690 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166424 4690 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166429 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166434 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166439 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166445 4690 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166450 4690 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166455 4690 feature_gate.go:330] unrecognized feature gate: PlatformOperators Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166460 4690 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166465 4690 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166470 4690 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166475 4690 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166481 4690 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166486 4690 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166491 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166504 4690 feature_gate.go:330] unrecognized feature gate: GatewayAPI Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.166515 4690 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166746 4690 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166758 4690 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Mar 20 13:22:34 crc 
kubenswrapper[4690]: W0320 13:22:34.166763 4690 feature_gate.go:330] unrecognized feature gate: NewOLM Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166769 4690 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166774 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166780 4690 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166785 4690 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166791 4690 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166796 4690 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166809 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166815 4690 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166820 4690 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166825 4690 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166830 4690 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166865 4690 feature_gate.go:330] unrecognized feature gate: PlatformOperators Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166870 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166876 4690 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166881 4690 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166886 4690 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166891 4690 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166896 4690 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166902 4690 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166907 4690 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166912 4690 feature_gate.go:330] unrecognized feature gate: GatewayAPI Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166918 4690 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166923 4690 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166928 4690 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166933 4690 feature_gate.go:330] unrecognized feature gate: OVNObservability Mar 20 13:22:34 crc 
kubenswrapper[4690]: W0320 13:22:34.166938 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166943 4690 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166950 4690 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166959 4690 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166966 4690 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166972 4690 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166987 4690 feature_gate.go:330] unrecognized feature gate: Example Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.166994 4690 feature_gate.go:330] unrecognized feature gate: PinnedImages Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167000 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167005 4690 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167011 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167016 4690 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167022 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167027 4690 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167033 4690 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167038 4690 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167043 4690 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167050 4690 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167056 4690 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167062 4690 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167068 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167074 4690 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167081 4690 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167087 4690 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167094 4690 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167099 4690 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167105 4690 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167110 4690 feature_gate.go:330] unrecognized feature gate: SignatureStores Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167115 4690 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167121 4690 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167126 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167131 4690 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167136 4690 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167141 4690 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167146 4690 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167152 4690 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167157 4690 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167164 4690 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167171 4690 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167177 4690 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167182 4690 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167188 4690 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.167205 4690 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.167214 4690 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.168598 4690 server.go:940] "Client rotation is on, will bootstrap in background" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.173645 4690 bootstrap.go:266] "Unhandled Error" err="part of the existing bootstrap client certificate in /var/lib/kubelet/kubeconfig is expired: 2026-02-24 05:52:08 +0000 UTC" logger="UnhandledError" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.181739 4690 bootstrap.go:101] "Use the bootstrap credentials to request a cert, and set kubeconfig to point to the certificate dir" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.181981 4690 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.184565 4690 server.go:997] "Starting client certificate rotation" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.184612 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.184807 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.210527 4690 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.212127 4690 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.213597 4690 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.228584 4690 log.go:25] "Validated CRI v1 runtime API" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.272080 4690 log.go:25] "Validated CRI v1 image API" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.275026 4690 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.282108 4690 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-03-20-13-18-16-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.282189 4690 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.316250 4690 manager.go:217] Machine: {Timestamp:2026-03-20 13:22:34.313651325 +0000 UTC m=+0.603251338 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654132736 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:0709f114-e447-44a1-aacc-6ba4cd210e43 BootID:64cb4bde-531a-46e3-b83b-f3ca53756a20 Filesystems:[{Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 
HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:60:34:d5 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:60:34:d5 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:ef:ad:f5 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:81:48:81 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:13:f8:b1 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:20:07:af Speed:-1 Mtu:1496} {Name:eth10 MacAddress:fa:f0:3f:fa:5b:b3 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:4a:e5:8f:1c:b5:91 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654132736 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} 
{Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.316629 4690 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.316883 4690 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.318709 4690 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.319058 4690 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.319116 4690 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.319467 4690 topology_manager.go:138] "Creating topology manager with none policy" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.319485 4690 container_manager_linux.go:303] "Creating device plugin manager" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.320174 4690 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Mar 20 13:22:34 crc kubenswrapper[4690]: 
I0320 13:22:34.320221 4690 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.321485 4690 state_mem.go:36] "Initialized new in-memory state store" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.321635 4690 server.go:1245] "Using root directory" path="/var/lib/kubelet" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.327122 4690 kubelet.go:418] "Attempting to sync node with API server" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.327156 4690 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.327179 4690 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.327203 4690 kubelet.go:324] "Adding apiserver pod source" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.327223 4690 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.332892 4690 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.334420 4690 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.335023 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.335047 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.335188 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.335246 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.338756 4690 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343004 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343068 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343086 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Mar 20 13:22:34 crc 
kubenswrapper[4690]: I0320 13:22:34.343101 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343125 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343142 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343158 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343181 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343198 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343214 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343236 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.343250 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.345609 4690 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.346291 4690 server.go:1280] "Started kubelet" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.346575 4690 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.346709 4690 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.347774 4690 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.348508 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:34 crc systemd[1]: Started Kubernetes Kubelet. 
Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.350618 4690 server.go:460] "Adding debug handlers to kubelet server" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.351503 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.351566 4690 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.351745 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.351887 4690 volume_manager.go:287] "The desired_state_of_world populator starts" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.351920 4690 volume_manager.go:289] "Starting Kubelet Volume Manager" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.351957 4690 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.353193 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.353296 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.353921 4690 factory.go:55] Registering systemd factory Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.353956 4690 factory.go:221] Registration of the systemd container factory successfully Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.354300 4690 factory.go:153] Registering CRI-O factory Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.354345 4690 factory.go:221] Registration of the crio container factory successfully Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.354429 4690 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.354466 4690 factory.go:103] Registering Raw factory Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.354491 4690 manager.go:1196] Started watching for new ooms in manager Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.355292 4690 manager.go:319] Starting recovery of all containers Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.355380 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="200ms" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362413 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Mar 20 13:22:34 crc 
kubenswrapper[4690]: I0320 13:22:34.362484 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362506 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362524 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362539 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362555 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362570 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362588 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362606 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362624 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362642 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362657 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: 
I0320 13:22:34.362672 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362690 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362705 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362720 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362738 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362754 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362770 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.362835 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.363678 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.363701 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.363719 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.363769 4690 reconstruct.go:130] "Volume is marked 
as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.363789 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.366736 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.366884 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.366922 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.366970 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.366991 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367023 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367560 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367611 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367641 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367725 4690 reconstruct.go:130] "Volume is marked as uncertain 
and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367753 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367772 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367789 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367940 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367964 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.367988 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368009 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368043 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368070 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368096 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368193 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.363997 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.204:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.189e8f6202bfd121 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.346246433 +0000 UTC m=+0.635846376,LastTimestamp:2026-03-20 13:22:34.346246433 +0000 UTC m=+0.635846376,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368211 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368310 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368410 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368491 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368513 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368531 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.368731 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370022 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" 
seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370277 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370331 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370678 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370749 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370778 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370816 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370870 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370905 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.370935 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.371087 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.371383 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Mar 20 13:22:34 crc 
kubenswrapper[4690]: I0320 13:22:34.371597 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372437 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372458 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372472 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372487 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372501 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372514 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372528 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372542 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372555 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372567 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 
13:22:34.372581 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372594 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372609 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372623 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372638 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372652 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372664 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372676 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372688 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372701 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372714 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372726 4690 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372739 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372751 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372793 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372813 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372829 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372864 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372881 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372896 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372911 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372926 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372942 4690 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372959 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372972 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.372986 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.373003 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374790 4690 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374830 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374876 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374895 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374911 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374929 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" 
seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374945 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374961 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374975 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.374995 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375011 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375026 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375045 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375060 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375075 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375088 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375104 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Mar 20 13:22:34 crc 
kubenswrapper[4690]: I0320 13:22:34.375119 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375134 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375151 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375166 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375182 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375197 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375213 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375227 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375242 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375255 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375272 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375287 4690 
reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375301 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375315 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375331 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375347 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375363 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375381 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375395 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375411 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375426 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375440 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375467 4690 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375484 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375497 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375512 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375527 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375543 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375557 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375573 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375590 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375604 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375619 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375635 
4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375651 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375665 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375718 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375734 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375749 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375763 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375777 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375792 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375806 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375821 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375835 4690 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375869 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375886 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375899 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375914 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375934 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375951 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375966 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375982 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.375996 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376012 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376026 4690 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376042 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376056 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376073 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376088 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376103 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376117 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376131 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376147 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376168 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376256 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376277 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376292 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376309 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376329 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376346 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376362 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376378 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376396 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376412 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376425 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376441 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376456 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376470 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376485 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376498 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376515 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376529 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376543 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376558 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376572 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376589 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376603 4690 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376617 4690 reconstruct.go:97] "Volume reconstruction finished" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.376627 4690 reconciler.go:26] "Reconciler: start 
to sync state" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.389219 4690 manager.go:324] Recovery completed Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.404458 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.408009 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.408075 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.408091 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.409119 4690 cpu_manager.go:225] "Starting CPU manager" policy="none" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.409143 4690 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.409166 4690 state_mem.go:36] "Initialized new in-memory state store" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.410382 4690 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.412791 4690 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.413125 4690 status_manager.go:217] "Starting to sync pod status with apiserver" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.413174 4690 kubelet.go:2335] "Starting kubelet main sync loop" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.413331 4690 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Mar 20 13:22:34 crc kubenswrapper[4690]: W0320 13:22:34.413733 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.413782 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.452220 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.475377 4690 policy_none.go:49] "None policy: Start" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.476517 4690 memory_manager.go:170] "Starting memorymanager" policy="None" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.476560 4690 state_mem.go:35] "Initializing new in-memory state store" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.514043 4690 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.552756 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not 
found" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.556615 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="400ms" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.557106 4690 manager.go:334] "Starting Device Plugin manager" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.557198 4690 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.557217 4690 server.go:79] "Starting device plugin registration server" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.557670 4690 eviction_manager.go:189] "Eviction manager: starting control loop" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.557697 4690 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.557804 4690 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.557977 4690 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.557988 4690 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.564411 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.658462 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.659984 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.660048 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.660072 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.660121 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.660812 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.204:6443: connect: connection refused" node="crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.714747 4690 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.714922 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.716553 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 
13:22:34.716598 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.716617 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.716811 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.717147 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.717216 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.717832 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.717895 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.717911 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718046 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718198 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718255 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718525 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718567 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718584 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718941 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718971 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.718986 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719114 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719307 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719362 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719316 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719465 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719489 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719929 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719963 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.719977 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.720121 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.720355 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.720421 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.720742 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.720762 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.720770 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.721503 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.721566 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.721590 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.722136 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.722191 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.722497 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.722538 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.722562 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.723134 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.723183 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.723201 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781552 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781628 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781661 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781691 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781731 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781758 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") 
" pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781799 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781823 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781873 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781923 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781947 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.781971 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.782098 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.782190 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.782243 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 
13:22:34.862077 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.863816 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.863908 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.863935 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.863982 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.864752 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.204:6443: connect: connection refused" node="crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.883982 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884047 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884084 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884116 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884150 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884184 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884246 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884286 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884315 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884342 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884342 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884386 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884390 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884430 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884462 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884372 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884503 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884337 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884556 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884562 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884566 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884589 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884667 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884696 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884714 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884792 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884813 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884866 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884891 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: I0320 13:22:34.884926 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Mar 20 13:22:34 crc kubenswrapper[4690]: E0320 13:22:34.957959 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="800ms" Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.058449 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.077560 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.094754 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-e615e9600b7b711477cad879324c210965675601ce003a7827e098b2708caf28 WatchSource:0}: Error finding container e615e9600b7b711477cad879324c210965675601ce003a7827e098b2708caf28: Status 404 returned error can't find the container with id e615e9600b7b711477cad879324c210965675601ce003a7827e098b2708caf28 Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.109874 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-7e8b14b3917e196b3d090c9e8248dacbcc9386131c50d53ae655f1e02a3605c9 WatchSource:0}: Error finding container 7e8b14b3917e196b3d090c9e8248dacbcc9386131c50d53ae655f1e02a3605c9: Status 404 returned error can't find the container with id 7e8b14b3917e196b3d090c9e8248dacbcc9386131c50d53ae655f1e02a3605c9 Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.118987 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.140324 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-481bc4c80cc9287a4056e80d52d0fd6ccbb172c0712f07b1cf2f2ec34e33966e WatchSource:0}: Error finding container 481bc4c80cc9287a4056e80d52d0fd6ccbb172c0712f07b1cf2f2ec34e33966e: Status 404 returned error can't find the container with id 481bc4c80cc9287a4056e80d52d0fd6ccbb172c0712f07b1cf2f2ec34e33966e Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.145155 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.161422 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.162970 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-ef926359b96c18db67ea5e91964658ef41121036e44bfc93ce757d44636773e0 WatchSource:0}: Error finding container ef926359b96c18db67ea5e91964658ef41121036e44bfc93ce757d44636773e0: Status 404 returned error can't find the container with id ef926359b96c18db67ea5e91964658ef41121036e44bfc93ce757d44636773e0 Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.173462 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-4926335c1af14c712ea0b0f5095da09c97da01ba9d09883e57d4e1f82c245814 WatchSource:0}: Error finding container 4926335c1af14c712ea0b0f5095da09c97da01ba9d09883e57d4e1f82c245814: Status 404 returned error can't find the container with id 4926335c1af14c712ea0b0f5095da09c97da01ba9d09883e57d4e1f82c245814 Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.265198 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.266940 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.266981 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.266992 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.267024 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:22:35 crc kubenswrapper[4690]: E0320 13:22:35.267682 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.204:6443: connect: connection refused" node="crc" Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.349374 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.418419 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"481bc4c80cc9287a4056e80d52d0fd6ccbb172c0712f07b1cf2f2ec34e33966e"} Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.420077 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7e8b14b3917e196b3d090c9e8248dacbcc9386131c50d53ae655f1e02a3605c9"} Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.421147 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"e615e9600b7b711477cad879324c210965675601ce003a7827e098b2708caf28"} Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.422229 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4926335c1af14c712ea0b0f5095da09c97da01ba9d09883e57d4e1f82c245814"} Mar 20 13:22:35 crc kubenswrapper[4690]: I0320 13:22:35.423192 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ef926359b96c18db67ea5e91964658ef41121036e44bfc93ce757d44636773e0"} Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.488408 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:35 crc kubenswrapper[4690]: E0320 13:22:35.488563 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.583298 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:35 crc kubenswrapper[4690]: E0320 13:22:35.583396 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.743066 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:35 crc kubenswrapper[4690]: E0320 13:22:35.743969 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get 
\"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:35 crc kubenswrapper[4690]: E0320 13:22:35.759683 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="1.6s" Mar 20 13:22:35 crc kubenswrapper[4690]: W0320 13:22:35.802719 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:35 crc kubenswrapper[4690]: E0320 13:22:35.802866 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.068117 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.069619 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.069650 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.069662 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.069690 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:22:36 crc kubenswrapper[4690]: E0320 13:22:36.070371 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.204:6443: connect: connection refused" node="crc" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.352793 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.412283 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 13:22:36 crc kubenswrapper[4690]: E0320 13:22:36.413257 4690 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.428985 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3"} Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.429052 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5"} Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.429067 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1"} Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.429079 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de"} Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.430875 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516" exitCode=0 Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.430948 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516"} Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.431004 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.432268 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.432310 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.432322 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.433191 4690 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28" exitCode=0 Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.433263 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28"} Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.433461 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.434047 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.434714 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.434905 4690 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.434956 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.435024 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.435040 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.435231 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.436358 4690 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260" exitCode=0 Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.436410 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260"} Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.436486 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.437581 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.437607 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.437619 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.439807 4690 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9" exitCode=0 Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.439932 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9"} Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.440056 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.441475 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.441528 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:36 crc kubenswrapper[4690]: I0320 13:22:36.441548 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.349558 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.204:6443: connect: 
connection refused Mar 20 13:22:37 crc kubenswrapper[4690]: E0320 13:22:37.361178 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="3.2s" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.447789 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"37231baa7335f4a160515c92702922b5c3e33d30b11b67edd146d015edc2a199"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.448020 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.449151 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.449252 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.449323 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.452895 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5112fb8ae9a44756acabacd15d9f1c29b5b96b39fc8ef42f488c15058fc5679f"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.453091 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d7157d887bc71eb3e9b9d3a40054646b9b47f64a296d7a63890bcef190e26fea"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.453105 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"43bb4e5a138b082a96715b6e5c220e87020b4480a60f63c9c79669a0cce317fb"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.453447 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.456131 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.456174 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.456187 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.460065 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.460117 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.460131 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.460143 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.462745 4690 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4" exitCode=0 Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.462809 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4"} Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.462858 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.462932 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.463783 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.463811 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.463820 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.464184 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.464219 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.464232 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:37 crc kubenswrapper[4690]: W0320 13:22:37.470701 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:37 crc kubenswrapper[4690]: E0320 13:22:37.470777 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.566263 4690 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.671203 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.673319 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.673346 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.673358 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:37 crc kubenswrapper[4690]: I0320 13:22:37.673384 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:22:37 crc kubenswrapper[4690]: E0320 13:22:37.673775 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.204:6443: connect: connection refused" node="crc" Mar 20 13:22:37 crc kubenswrapper[4690]: W0320 13:22:37.845300 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.204:6443: connect: connection refused Mar 20 13:22:37 crc kubenswrapper[4690]: E0320 13:22:37.845380 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.204:6443: connect: connection refused" logger="UnhandledError" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.211468 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.470041 4690 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6" exitCode=0 Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.470176 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6"} Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.470331 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.471815 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.471892 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.471909 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.476286 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5d9cf75db8803fcc5922d915c41d9365d4ef946bfb995af95b8dd3bcf01d943d"} Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.476327 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.476431 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.476478 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.476503 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.478038 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.478111 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.478128 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.479068 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.479117 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.479135 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.479683 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.479729 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.479746 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.480511 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.480558 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:38 crc kubenswrapper[4690]: I0320 13:22:38.480582 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.491966 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c"} Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.492029 4690 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.492052 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb"} Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.492115 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f"} Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.492099 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.492117 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.492133 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4"} Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.493742 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.493804 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.493822 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.494142 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.494195 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.494210 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:39 crc kubenswrapper[4690]: I0320 13:22:39.712001 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.498759 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff"} Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.498837 4690 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.498912 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.498918 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.500107 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.500140 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.500152 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.500233 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.500263 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.500276 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.507881 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.566924 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.567026 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.705374 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.849254 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.874276 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.875753 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.875801 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.875813 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:40 crc kubenswrapper[4690]: I0320 13:22:40.875838 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 13:22:41.501650 4690 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 13:22:41.501710 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 13:22:41.501751 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 13:22:41.502799 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 13:22:41.502834 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 
13:22:41.502868 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 13:22:41.503038 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 13:22:41.503076 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:41 crc kubenswrapper[4690]: I0320 13:22:41.503093 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.504409 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.505713 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.505765 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.505783 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.869645 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.869984 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.871712 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.871780 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.871800 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.953961 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.954184 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.955917 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.955971 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:42 crc kubenswrapper[4690]: I0320 13:22:42.955992 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:43 crc kubenswrapper[4690]: I0320 13:22:43.168692 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:43 crc kubenswrapper[4690]: I0320 13:22:43.176923 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:43 crc kubenswrapper[4690]: I0320 13:22:43.507890 4690 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:43 crc kubenswrapper[4690]: I0320 13:22:43.507948 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:43 crc kubenswrapper[4690]: I0320 13:22:43.510478 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:43 crc kubenswrapper[4690]: I0320 13:22:43.510529 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:43 crc kubenswrapper[4690]: I0320 13:22:43.510548 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:44 crc kubenswrapper[4690]: I0320 13:22:44.510306 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:44 crc kubenswrapper[4690]: I0320 13:22:44.511145 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:44 crc kubenswrapper[4690]: I0320 13:22:44.511176 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:44 crc kubenswrapper[4690]: I0320 13:22:44.511187 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:44 crc kubenswrapper[4690]: E0320 13:22:44.564498 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 13:22:45 crc kubenswrapper[4690]: I0320 13:22:45.393320 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:22:45 crc kubenswrapper[4690]: I0320 13:22:45.514235 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:45 crc kubenswrapper[4690]: I0320 13:22:45.515908 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:45 crc kubenswrapper[4690]: I0320 13:22:45.515943 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:45 crc kubenswrapper[4690]: I0320 13:22:45.515954 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:46 crc kubenswrapper[4690]: I0320 13:22:46.730301 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Mar 20 13:22:46 crc kubenswrapper[4690]: I0320 13:22:46.730586 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:46 crc kubenswrapper[4690]: I0320 13:22:46.732636 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:46 crc kubenswrapper[4690]: I0320 13:22:46.732704 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:46 crc kubenswrapper[4690]: I0320 13:22:46.732723 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.350400 4690 csi_plugin.go:884] Failed to contact API server when waiting 
for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Mar 20 13:22:48 crc kubenswrapper[4690]: W0320 13:22:48.361018 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.361109 4690 trace.go:236] Trace[1139153663]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (20-Mar-2026 13:22:38.359) (total time: 10001ms): Mar 20 13:22:48 crc kubenswrapper[4690]: Trace[1139153663]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:22:48.361) Mar 20 13:22:48 crc kubenswrapper[4690]: Trace[1139153663]: [10.001402406s] [10.001402406s] END Mar 20 13:22:48 crc kubenswrapper[4690]: E0320 13:22:48.361132 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Mar 20 13:22:48 crc kubenswrapper[4690]: W0320 13:22:48.438915 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.439045 4690 trace.go:236] Trace[566671446]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (20-Mar-2026 13:22:38.437) (total time: 10001ms): Mar 20 13:22:48 crc kubenswrapper[4690]: Trace[566671446]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (13:22:48.438) Mar 20 13:22:48 crc kubenswrapper[4690]: Trace[566671446]: [10.001751436s] [10.001751436s] END Mar 20 13:22:48 crc kubenswrapper[4690]: E0320 13:22:48.439083 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.524462 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.527319 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5d9cf75db8803fcc5922d915c41d9365d4ef946bfb995af95b8dd3bcf01d943d" exitCode=255 Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.527378 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"5d9cf75db8803fcc5922d915c41d9365d4ef946bfb995af95b8dd3bcf01d943d"} Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.527626 4690 kubelet_node_status.go:401] "Setting node 
annotation to enable volume controller attach/detach" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.528790 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.528822 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.528839 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.529596 4690 scope.go:117] "RemoveContainer" containerID="5d9cf75db8803fcc5922d915c41d9365d4ef946bfb995af95b8dd3bcf01d943d" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.591791 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:48 crc kubenswrapper[4690]: E0320 13:22:48.775301 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:48Z is after 2026-02-23T05:33:13Z" node="crc" Mar 20 13:22:48 crc kubenswrapper[4690]: W0320 13:22:48.778013 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:48Z is after 2026-02-23T05:33:13Z Mar 20 13:22:48 crc kubenswrapper[4690]: E0320 13:22:48.778184 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:48Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 13:22:48 crc kubenswrapper[4690]: W0320 13:22:48.782509 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:48Z is after 2026-02-23T05:33:13Z Mar 20 13:22:48 crc kubenswrapper[4690]: E0320 13:22:48.782593 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:48Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.784727 4690 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get 
path \"/livez\"","reason":"Forbidden","details":{},"code":403} Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.784806 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Mar 20 13:22:48 crc kubenswrapper[4690]: E0320 13:22:48.788432 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:48Z is after 2026-02-23T05:33:13Z" event="&Event{ObjectMeta:{crc.189e8f6202bfd121 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.346246433 +0000 UTC m=+0.635846376,LastTimestamp:2026-03-20 13:22:34.346246433 +0000 UTC m=+0.635846376,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.796447 4690 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Mar 20 13:22:48 crc kubenswrapper[4690]: I0320 13:22:48.796531 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Mar 20 13:22:48 crc kubenswrapper[4690]: E0320 13:22:48.796977 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:48Z is after 2026-02-23T05:33:13Z" interval="6.4s" Mar 20 13:22:48 crc kubenswrapper[4690]: E0320 13:22:48.800730 4690 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:48Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 13:22:49 crc kubenswrapper[4690]: I0320 13:22:49.353185 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:49Z is after 2026-02-23T05:33:13Z Mar 20 13:22:49 crc kubenswrapper[4690]: I0320 13:22:49.532733 4690 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Mar 20 13:22:49 crc kubenswrapper[4690]: I0320 13:22:49.536287 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef"} Mar 20 13:22:49 crc kubenswrapper[4690]: I0320 13:22:49.536525 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:49 crc kubenswrapper[4690]: I0320 13:22:49.537898 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:49 crc kubenswrapper[4690]: I0320 13:22:49.537937 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:49 crc kubenswrapper[4690]: I0320 13:22:49.537950 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.353032 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:50Z is after 2026-02-23T05:33:13Z Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.540148 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.541190 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.543614 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" exitCode=255 Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.543681 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef"} Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.543760 4690 scope.go:117] "RemoveContainer" containerID="5d9cf75db8803fcc5922d915c41d9365d4ef946bfb995af95b8dd3bcf01d943d" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.543766 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.546051 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.546077 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.546087 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.546623 4690 
scope.go:117] "RemoveContainer" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" Mar 20 13:22:50 crc kubenswrapper[4690]: E0320 13:22:50.547189 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.567661 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.567721 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 13:22:50 crc kubenswrapper[4690]: I0320 13:22:50.860122 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:51 crc kubenswrapper[4690]: I0320 13:22:51.352739 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:51Z is after 2026-02-23T05:33:13Z Mar 20 13:22:51 crc kubenswrapper[4690]: I0320 13:22:51.549600 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Mar 20 13:22:51 crc kubenswrapper[4690]: I0320 13:22:51.552905 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:51 crc kubenswrapper[4690]: I0320 13:22:51.554321 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:51 crc kubenswrapper[4690]: I0320 13:22:51.554369 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:51 crc kubenswrapper[4690]: I0320 13:22:51.554386 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:51 crc kubenswrapper[4690]: I0320 13:22:51.555404 4690 scope.go:117] "RemoveContainer" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" Mar 20 13:22:51 crc kubenswrapper[4690]: E0320 13:22:51.555729 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:22:51 crc kubenswrapper[4690]: I0320 13:22:51.560567 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:52 crc kubenswrapper[4690]: I0320 13:22:52.353414 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:52Z is after 2026-02-23T05:33:13Z Mar 20 13:22:52 crc kubenswrapper[4690]: I0320 13:22:52.555445 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:52 crc kubenswrapper[4690]: I0320 13:22:52.557256 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:52 crc kubenswrapper[4690]: I0320 13:22:52.557294 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:52 crc kubenswrapper[4690]: I0320 13:22:52.557302 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:52 crc kubenswrapper[4690]: I0320 13:22:52.557944 4690 scope.go:117] "RemoveContainer" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" Mar 20 13:22:52 crc kubenswrapper[4690]: E0320 13:22:52.558115 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:22:52 crc kubenswrapper[4690]: W0320 13:22:52.818567 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:52Z is after 2026-02-23T05:33:13Z Mar 20 13:22:52 crc kubenswrapper[4690]: E0320 13:22:52.818669 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:52Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 13:22:52 crc kubenswrapper[4690]: I0320 13:22:52.954375 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:53 crc kubenswrapper[4690]: I0320 13:22:53.352643 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:53Z is after 2026-02-23T05:33:13Z Mar 20 13:22:53 crc kubenswrapper[4690]: I0320 
13:22:53.558217 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:53 crc kubenswrapper[4690]: I0320 13:22:53.560356 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:53 crc kubenswrapper[4690]: I0320 13:22:53.560407 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:53 crc kubenswrapper[4690]: I0320 13:22:53.560425 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:53 crc kubenswrapper[4690]: I0320 13:22:53.561269 4690 scope.go:117] "RemoveContainer" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" Mar 20 13:22:53 crc kubenswrapper[4690]: E0320 13:22:53.561541 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:22:54 crc kubenswrapper[4690]: I0320 13:22:54.352253 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:54Z is after 2026-02-23T05:33:13Z Mar 20 13:22:54 crc kubenswrapper[4690]: I0320 13:22:54.560403 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:54 crc kubenswrapper[4690]: I0320 13:22:54.561544 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:54 crc kubenswrapper[4690]: I0320 13:22:54.561617 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:54 crc kubenswrapper[4690]: I0320 13:22:54.561627 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:54 crc kubenswrapper[4690]: I0320 13:22:54.562354 4690 scope.go:117] "RemoveContainer" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" Mar 20 13:22:54 crc kubenswrapper[4690]: E0320 13:22:54.562550 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:22:54 crc kubenswrapper[4690]: E0320 13:22:54.564776 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 13:22:54 crc kubenswrapper[4690]: W0320 13:22:54.750028 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-03-20T13:22:54Z is after 2026-02-23T05:33:13Z Mar 20 13:22:54 crc kubenswrapper[4690]: E0320 13:22:54.750183 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:54Z is after 2026-02-23T05:33:13Z" logger="UnhandledError" Mar 20 13:22:55 crc kubenswrapper[4690]: I0320 13:22:55.176028 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:55 crc kubenswrapper[4690]: I0320 13:22:55.177601 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:55 crc kubenswrapper[4690]: I0320 13:22:55.177644 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:55 crc kubenswrapper[4690]: I0320 13:22:55.177658 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:55 crc kubenswrapper[4690]: I0320 13:22:55.177682 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:22:55 crc kubenswrapper[4690]: E0320 13:22:55.182182 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:55Z is after 2026-02-23T05:33:13Z" node="crc" Mar 20 13:22:55 crc kubenswrapper[4690]: E0320 13:22:55.202529 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:55Z is after 2026-02-23T05:33:13Z" interval="7s" Mar 20 13:22:55 crc kubenswrapper[4690]: I0320 13:22:55.354733 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:22:55Z is after 2026-02-23T05:33:13Z Mar 20 13:22:56 crc kubenswrapper[4690]: I0320 13:22:56.354392 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:22:56 crc kubenswrapper[4690]: I0320 13:22:56.769117 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Mar 20 13:22:56 crc kubenswrapper[4690]: I0320 13:22:56.769421 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:56 crc kubenswrapper[4690]: I0320 13:22:56.770851 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:56 crc kubenswrapper[4690]: I0320 13:22:56.771008 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:56 crc 
kubenswrapper[4690]: I0320 13:22:56.771096 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:56 crc kubenswrapper[4690]: I0320 13:22:56.789755 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Mar 20 13:22:56 crc kubenswrapper[4690]: I0320 13:22:56.922240 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Mar 20 13:22:56 crc kubenswrapper[4690]: I0320 13:22:56.938061 4690 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Mar 20 13:22:57 crc kubenswrapper[4690]: I0320 13:22:57.350431 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:22:57 crc kubenswrapper[4690]: I0320 13:22:57.568717 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:57 crc kubenswrapper[4690]: I0320 13:22:57.569966 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:57 crc kubenswrapper[4690]: I0320 13:22:57.570007 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:57 crc kubenswrapper[4690]: I0320 13:22:57.570019 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:58 crc kubenswrapper[4690]: I0320 13:22:58.356744 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:22:58 crc kubenswrapper[4690]: I0320 13:22:58.591742 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:22:58 crc kubenswrapper[4690]: I0320 13:22:58.592048 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:22:58 crc kubenswrapper[4690]: I0320 13:22:58.593600 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:22:58 crc kubenswrapper[4690]: I0320 13:22:58.593651 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:22:58 crc kubenswrapper[4690]: I0320 13:22:58.593667 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:22:58 crc kubenswrapper[4690]: I0320 13:22:58.594530 4690 scope.go:117] "RemoveContainer" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.594849 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.798221 4690 
event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f6202bfd121 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.346246433 +0000 UTC m=+0.635846376,LastTimestamp:2026-03-20 13:22:34.346246433 +0000 UTC m=+0.635846376,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.806122 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f0731 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,LastTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.813574 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f665a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,LastTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.820568 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f9e22 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408099362 +0000 UTC m=+0.697699325,LastTimestamp:2026-03-20 13:22:34.408099362 +0000 UTC m=+0.697699325,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.827377 4690 event.go:359] "Server 
rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f620f99f597 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeAllocatableEnforced,Message:Updated Node Allocatable limit across pods,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.561869207 +0000 UTC m=+0.851469150,LastTimestamp:2026-03-20 13:22:34.561869207 +0000 UTC m=+0.851469150,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.838754 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f0731\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f0731 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,LastTimestamp:2026-03-20 13:22:34.660026129 +0000 UTC m=+0.949626102,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.853092 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f665a\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f665a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,LastTimestamp:2026-03-20 13:22:34.66006271 +0000 UTC m=+0.949662693,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.861009 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f9e22\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f9e22 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408099362 +0000 UTC m=+0.697699325,LastTimestamp:2026-03-20 13:22:34.660084011 +0000 UTC m=+0.949683984,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 
20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.867197 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f0731\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f0731 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,LastTimestamp:2026-03-20 13:22:34.716580688 +0000 UTC m=+1.006180641,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.874157 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f665a\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f665a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,LastTimestamp:2026-03-20 13:22:34.716609769 +0000 UTC m=+1.006209722,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.880808 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f9e22\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f9e22 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408099362 +0000 UTC m=+0.697699325,LastTimestamp:2026-03-20 13:22:34.716626779 +0000 UTC m=+1.006226732,Count:3,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.888140 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f0731\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f0731 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,LastTimestamp:2026-03-20 13:22:34.717879035 +0000 UTC 
m=+1.007478998,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.896948 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f665a\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f665a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,LastTimestamp:2026-03-20 13:22:34.717905876 +0000 UTC m=+1.007505839,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.903346 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f9e22\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f9e22 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408099362 +0000 UTC m=+0.697699325,LastTimestamp:2026-03-20 13:22:34.717920036 +0000 UTC m=+1.007519999,Count:4,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.910062 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f0731\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f0731 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,LastTimestamp:2026-03-20 13:22:34.718553154 +0000 UTC m=+1.008153137,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.916965 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f665a\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f665a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: 
NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,LastTimestamp:2026-03-20 13:22:34.718577845 +0000 UTC m=+1.008177818,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.923602 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f9e22\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f9e22 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408099362 +0000 UTC m=+0.697699325,LastTimestamp:2026-03-20 13:22:34.718594245 +0000 UTC m=+1.008194228,Count:5,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.929535 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f0731\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f0731 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,LastTimestamp:2026-03-20 13:22:34.718962586 +0000 UTC m=+1.008562549,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.937702 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f665a\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f665a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,LastTimestamp:2026-03-20 13:22:34.718980396 +0000 UTC m=+1.008580349,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.943455 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f9e22\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f9e22 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408099362 +0000 UTC m=+0.697699325,LastTimestamp:2026-03-20 13:22:34.718994807 +0000 UTC m=+1.008594770,Count:6,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.947466 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f0731\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f0731 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,LastTimestamp:2026-03-20 13:22:34.71944958 +0000 UTC m=+1.009049563,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.952665 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f665a\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f665a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,LastTimestamp:2026-03-20 13:22:34.71947759 +0000 UTC m=+1.009077563,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.974024 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f9e22\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f9e22 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientPID,Message:Node crc status is now: NodeHasSufficientPID,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408099362 +0000 UTC m=+0.697699325,LastTimestamp:2026-03-20 13:22:34.719501131 +0000 UTC m=+1.009101114,Count:7,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.980764 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f0731\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace 
\"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f0731 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasSufficientMemory,Message:Node crc status is now: NodeHasSufficientMemory,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408060721 +0000 UTC m=+0.697660704,LastTimestamp:2026-03-20 13:22:34.719956524 +0000 UTC m=+1.009556467,Count:8,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.987317 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"crc.189e8f62066f665a\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"default\"" event="&Event{ObjectMeta:{crc.189e8f62066f665a default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:NodeHasNoDiskPressure,Message:Node crc status is now: NodeHasNoDiskPressure,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:34.408085082 +0000 UTC m=+0.697685045,LastTimestamp:2026-03-20 13:22:34.719969264 +0000 UTC m=+1.009569207,Count:8,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.991556 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e8f622fa9516c openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.099746668 +0000 UTC m=+1.389346611,LastTimestamp:2026-03-20 13:22:35.099746668 +0000 UTC m=+1.389346611,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.995515 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f6230997e60 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on 
machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.115486816 +0000 UTC m=+1.405086759,LastTimestamp:2026-03-20 13:22:35.115486816 +0000 UTC m=+1.405086759,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:58 crc kubenswrapper[4690]: E0320 13:22:58.999888 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62324d9f23 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.144068899 +0000 UTC m=+1.433668842,LastTimestamp:2026-03-20 13:22:35.144068899 +0000 UTC m=+1.433668842,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.004079 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f6233a1fe3c openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.166375484 +0000 UTC m=+1.455975437,LastTimestamp:2026-03-20 13:22:35.166375484 +0000 UTC m=+1.455975437,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.007199 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f62343834c4 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on 
machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.176219844 +0000 UTC m=+1.465819787,LastTimestamp:2026-03-20 13:22:35.176219844 +0000 UTC m=+1.465819787,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.010692 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f6252e69e13 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Created,Message:Created container kube-controller-manager,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.690966547 +0000 UTC m=+1.980566500,LastTimestamp:2026-03-20 13:22:35.690966547 +0000 UTC m=+1.980566500,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.014116 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f625334ed7b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.696098683 +0000 UTC m=+1.985698626,LastTimestamp:2026-03-20 13:22:35.696098683 +0000 UTC m=+1.985698626,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.017190 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e8f6253388ba4 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.69633578 +0000 UTC m=+1.985935743,LastTimestamp:2026-03-20 13:22:35.69633578 +0000 UTC m=+1.985935743,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.021046 4690 event.go:359] "Server rejected 
event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f62533a7e72 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Created,Message:Created container wait-for-host-port,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.696463474 +0000 UTC m=+1.986063427,LastTimestamp:2026-03-20 13:22:35.696463474 +0000 UTC m=+1.986063427,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.024918 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f62535b58af openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager},},Reason:Started,Message:Started container kube-controller-manager,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.698616495 +0000 UTC m=+1.988216438,LastTimestamp:2026-03-20 13:22:35.698616495 +0000 UTC m=+1.988216438,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.032718 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f62536a71e9 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Created,Message:Created container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.699605993 +0000 UTC m=+1.989205946,LastTimestamp:2026-03-20 13:22:35.699605993 +0000 UTC m=+1.989205946,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.036994 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f6253861b35 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.701418805 +0000 UTC m=+1.991018768,LastTimestamp:2026-03-20 13:22:35.701418805 +0000 UTC m=+1.991018768,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.040763 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e8f62542dcfb7 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.712409527 +0000 UTC m=+2.002009490,LastTimestamp:2026-03-20 13:22:35.712409527 +0000 UTC m=+2.002009490,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.044095 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62545a828b openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.715338891 +0000 UTC m=+2.004938854,LastTimestamp:2026-03-20 13:22:35.715338891 +0000 UTC m=+2.004938854,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.047528 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f62545eba88 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{wait-for-host-port},},Reason:Started,Message:Started container 
wait-for-host-port,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.715615368 +0000 UTC m=+2.005215321,LastTimestamp:2026-03-20 13:22:35.715615368 +0000 UTC m=+2.005215321,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.049472 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f625484031a openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{setup},},Reason:Started,Message:Started container setup,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.718058778 +0000 UTC m=+2.007658741,LastTimestamp:2026-03-20 13:22:35.718058778 +0000 UTC m=+2.007658741,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.052270 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f6266dd61da openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Created,Message:Created container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.025905626 +0000 UTC m=+2.315505619,LastTimestamp:2026-03-20 13:22:36.025905626 +0000 UTC m=+2.315505619,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.055447 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f626799cd8f openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Started,Message:Started container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.038253967 +0000 UTC m=+2.327853950,LastTimestamp:2026-03-20 13:22:36.038253967 +0000 UTC m=+2.327853950,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.058971 4690 event.go:359] "Server 
rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f6267b518ac openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.040042668 +0000 UTC m=+2.329642651,LastTimestamp:2026-03-20 13:22:36.040042668 +0000 UTC m=+2.329642651,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.063697 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f627499e2de openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Created,Message:Created container kube-controller-manager-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.25636323 +0000 UTC m=+2.545963173,LastTimestamp:2026-03-20 13:22:36.25636323 +0000 UTC m=+2.545963173,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.067279 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f6275266961 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-cert-syncer},},Reason:Started,Message:Started container kube-controller-manager-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.265572705 +0000 UTC m=+2.555172648,LastTimestamp:2026-03-20 13:22:36.265572705 +0000 UTC m=+2.555172648,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.070840 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" 
in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f62753aa9dc openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.266899932 +0000 UTC m=+2.556499875,LastTimestamp:2026-03-20 13:22:36.266899932 +0000 UTC m=+2.556499875,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.075297 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f627e112213 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Created,Message:Created container kube-controller-manager-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.415173139 +0000 UTC m=+2.704773082,LastTimestamp:2026-03-20 13:22:36.415173139 +0000 UTC m=+2.704773082,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.080439 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f627f2db9a6 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.433824166 +0000 UTC m=+2.723424109,LastTimestamp:2026-03-20 13:22:36.433824166 +0000 UTC m=+2.723424109,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.084588 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace 
\"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f627f6f7fa7 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.438134695 +0000 UTC m=+2.727734648,LastTimestamp:2026-03-20 13:22:36.438134695 +0000 UTC m=+2.727734648,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.089092 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e8f627f79eb8a openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.438817674 +0000 UTC m=+2.728417617,LastTimestamp:2026-03-20 13:22:36.438817674 +0000 UTC m=+2.728417617,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.094348 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f627fbf45f0 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.4433628 +0000 UTC m=+2.732962743,LastTimestamp:2026-03-20 13:22:36.4433628 +0000 UTC m=+2.732962743,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.097947 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" 
event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f62801ac066 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-controller-manager-recovery-controller},},Reason:Started,Message:Started container kube-controller-manager-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.449357926 +0000 UTC m=+2.738957869,LastTimestamp:2026-03-20 13:22:36.449357926 +0000 UTC m=+2.738957869,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.101106 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f628c9f092e openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Created,Message:Created container kube-scheduler,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.659353902 +0000 UTC m=+2.948953845,LastTimestamp:2026-03-20 13:22:36.659353902 +0000 UTC m=+2.948953845,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.104340 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e8f628cc05279 openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Created,Message:Created container kube-rbac-proxy-crio,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.661535353 +0000 UTC m=+2.951135296,LastTimestamp:2026-03-20 13:22:36.661535353 +0000 UTC m=+2.951135296,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.107539 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f628cc5f7bf openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Created,Message:Created container kube-apiserver,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.661905343 +0000 UTC m=+2.951505276,LastTimestamp:2026-03-20 13:22:36.661905343 +0000 UTC m=+2.951505276,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.110612 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f628ccc5f49 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Created,Message:Created container etcd-ensure-env-vars,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.662325065 +0000 UTC m=+2.951925008,LastTimestamp:2026-03-20 13:22:36.662325065 +0000 UTC m=+2.951925008,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.113517 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f628d78b9cb openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler},},Reason:Started,Message:Started container kube-scheduler,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.673620427 +0000 UTC m=+2.963220370,LastTimestamp:2026-03-20 13:22:36.673620427 +0000 UTC m=+2.963220370,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.116992 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f628d8c68a5 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 
13:22:36.674910373 +0000 UTC m=+2.964510316,LastTimestamp:2026-03-20 13:22:36.674910373 +0000 UTC m=+2.964510316,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.120676 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f628da88432 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Started,Message:Started container kube-apiserver,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.676752434 +0000 UTC m=+2.966352377,LastTimestamp:2026-03-20 13:22:36.676752434 +0000 UTC m=+2.966352377,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.123953 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f628e0abb97 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-ensure-env-vars},},Reason:Started,Message:Started container etcd-ensure-env-vars,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.683189143 +0000 UTC m=+2.972789076,LastTimestamp:2026-03-20 13:22:36.683189143 +0000 UTC m=+2.972789076,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.127448 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-machine-config-operator\"" event="&Event{ObjectMeta:{kube-rbac-proxy-crio-crc.189e8f628e0caf4a openshift-machine-config-operator 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-machine-config-operator,Name:kube-rbac-proxy-crio-crc,UID:d1b160f5dda77d281dd8e69ec8d817f9,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-rbac-proxy-crio},},Reason:Started,Message:Started container kube-rbac-proxy-crio,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.683317066 +0000 UTC m=+2.972917009,LastTimestamp:2026-03-20 13:22:36.683317066 +0000 UTC m=+2.972917009,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.130525 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the 
namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f628e2e26a3 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.685510307 +0000 UTC m=+2.975110250,LastTimestamp:2026-03-20 13:22:36.685510307 +0000 UTC m=+2.975110250,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.133950 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f6298f69949 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Created,Message:Created container kube-scheduler-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.866419017 +0000 UTC m=+3.156018960,LastTimestamp:2026-03-20 13:22:36.866419017 +0000 UTC m=+3.156018960,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.137731 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f629903dcf7 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Created,Message:Created container kube-apiserver-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.867288311 +0000 UTC m=+3.156888264,LastTimestamp:2026-03-20 13:22:36.867288311 +0000 UTC m=+3.156888264,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.141902 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f6299ef1cd2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-syncer},},Reason:Started,Message:Started container kube-apiserver-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.882705618 +0000 UTC m=+3.172305561,LastTimestamp:2026-03-20 13:22:36.882705618 +0000 UTC m=+3.172305561,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.145584 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f6299fed288 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.883735176 +0000 UTC m=+3.173335129,LastTimestamp:2026-03-20 13:22:36.883735176 +0000 UTC m=+3.173335129,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.152661 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f629a0cbffc openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-cert-syncer},},Reason:Started,Message:Started container kube-scheduler-cert-syncer,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.884647932 +0000 UTC m=+3.174247875,LastTimestamp:2026-03-20 13:22:36.884647932 +0000 UTC m=+3.174247875,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.153828 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f629a1abfad openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.885565357 +0000 UTC m=+3.175165310,LastTimestamp:2026-03-20 13:22:36.885565357 +0000 UTC m=+3.175165310,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.158211 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f62a76ab69b openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Created,Message:Created container kube-scheduler-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.108909723 +0000 UTC m=+3.398509666,LastTimestamp:2026-03-20 13:22:37.108909723 +0000 UTC m=+3.398509666,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.163754 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62a7ad6ca8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Created,Message:Created container kube-apiserver-cert-regeneration-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.113281704 +0000 UTC m=+3.402881657,LastTimestamp:2026-03-20 13:22:37.113281704 +0000 UTC m=+3.402881657,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.168110 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-scheduler\"" event="&Event{ObjectMeta:{openshift-kube-scheduler-crc.189e8f62a8413486 openshift-kube-scheduler 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-scheduler,Name:openshift-kube-scheduler-crc,UID:3dcd261975c3d6b9a6ad6367fd4facd3,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-scheduler-recovery-controller},},Reason:Started,Message:Started container kube-scheduler-recovery-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.122966662 +0000 UTC m=+3.412566605,LastTimestamp:2026-03-20 13:22:37.122966662 +0000 UTC m=+3.412566605,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.173012 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62a879ee19 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-cert-regeneration-controller},},Reason:Started,Message:Started container kube-apiserver-cert-regeneration-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.126684185 +0000 UTC m=+3.416284128,LastTimestamp:2026-03-20 13:22:37.126684185 +0000 UTC m=+3.416284128,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.177542 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62a8897efa openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.127704314 +0000 UTC m=+3.417304267,LastTimestamp:2026-03-20 13:22:37.127704314 +0000 UTC m=+3.417304267,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.181673 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62b5263fff openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Created,Message:Created container kube-apiserver-insecure-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.339303935 +0000 UTC m=+3.628903888,LastTimestamp:2026-03-20 13:22:37.339303935 +0000 UTC m=+3.628903888,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.185628 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62b5d6089e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-insecure-readyz},},Reason:Started,Message:Started container kube-apiserver-insecure-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.350824094 +0000 UTC m=+3.640424047,LastTimestamp:2026-03-20 13:22:37.350824094 +0000 UTC m=+3.640424047,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.189117 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62b5e6eaec openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.351930604 +0000 UTC m=+3.641530557,LastTimestamp:2026-03-20 13:22:37.351930604 +0000 UTC m=+3.641530557,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.194103 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f62bcad748f openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Pulled,Message:Container image 
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.465605263 +0000 UTC m=+3.755205206,LastTimestamp:2026-03-20 13:22:37.465605263 +0000 UTC m=+3.755205206,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.198276 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62c1bb71e2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Created,Message:Created container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.550408162 +0000 UTC m=+3.840008105,LastTimestamp:2026-03-20 13:22:37.550408162 +0000 UTC m=+3.840008105,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.202800 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62c2514606 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Started,Message:Started container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.560227334 +0000 UTC m=+3.849827277,LastTimestamp:2026-03-20 13:22:37.560227334 +0000 UTC m=+3.849827277,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.209150 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f62c78ef9f1 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Created,Message:Created container etcd-resources-copy,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.648157169 +0000 UTC m=+3.937757112,LastTimestamp:2026-03-20 13:22:37.648157169 +0000 UTC m=+3.937757112,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 
20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.215796 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f62c81e2103 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.initContainers{etcd-resources-copy},},Reason:Started,Message:Started container etcd-resources-copy,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.657538819 +0000 UTC m=+3.947138752,LastTimestamp:2026-03-20 13:22:37.657538819 +0000 UTC m=+3.947138752,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.223476 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f62f8d3eaa8 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:38.474758824 +0000 UTC m=+4.764358797,LastTimestamp:2026-03-20 13:22:38.474758824 +0000 UTC m=+4.764358797,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.229963 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f6305ab19c9 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Created,Message:Created container etcdctl,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:38.690187721 +0000 UTC m=+4.979787674,LastTimestamp:2026-03-20 13:22:38.690187721 +0000 UTC m=+4.979787674,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.236494 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f63065036c5 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcdctl},},Reason:Started,Message:Started container etcdctl,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:38.701008581 +0000 UTC m=+4.990608544,LastTimestamp:2026-03-20 13:22:38.701008581 +0000 UTC m=+4.990608544,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.240818 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f630663820c openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:38.702273036 +0000 UTC m=+4.991872989,LastTimestamp:2026-03-20 13:22:38.702273036 +0000 UTC m=+4.991872989,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.247286 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f6314e6dfee openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Created,Message:Created container etcd,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:38.94576331 +0000 UTC m=+5.235363263,LastTimestamp:2026-03-20 13:22:38.94576331 +0000 UTC m=+5.235363263,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.251674 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f631637140e openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd},},Reason:Started,Message:Started container etcd,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:38.96779675 +0000 UTC m=+5.257396733,LastTimestamp:2026-03-20 13:22:38.96779675 +0000 UTC m=+5.257396733,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 
crc kubenswrapper[4690]: E0320 13:22:59.254940 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f63164f82c3 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:38.969397955 +0000 UTC m=+5.258997938,LastTimestamp:2026-03-20 13:22:38.969397955 +0000 UTC m=+5.258997938,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.258423 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f632552bd77 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Created,Message:Created container etcd-metrics,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:39.221267831 +0000 UTC m=+5.510867774,LastTimestamp:2026-03-20 13:22:39.221267831 +0000 UTC m=+5.510867774,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.261970 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f632605c430 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-metrics},},Reason:Started,Message:Started container etcd-metrics,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:39.233000496 +0000 UTC m=+5.522600459,LastTimestamp:2026-03-20 13:22:39.233000496 +0000 UTC m=+5.522600459,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.265066 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f632616ec6b openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] 
[]},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:39.234124907 +0000 UTC m=+5.523724870,LastTimestamp:2026-03-20 13:22:39.234124907 +0000 UTC m=+5.523724870,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.268939 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f6330c73942 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Created,Message:Created container etcd-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:39.413451074 +0000 UTC m=+5.703051017,LastTimestamp:2026-03-20 13:22:39.413451074 +0000 UTC m=+5.703051017,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.272803 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f6331841afb openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-readyz},},Reason:Started,Message:Started container etcd-readyz,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:39.425829627 +0000 UTC m=+5.715429570,LastTimestamp:2026-03-20 13:22:39.425829627 +0000 UTC m=+5.715429570,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.276218 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f63319a926e openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Pulled,Message:Container image \"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:39.427301998 +0000 UTC m=+5.716901941,LastTimestamp:2026-03-20 13:22:39.427301998 +0000 
UTC m=+5.716901941,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.280363 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f634286ceca openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Created,Message:Created container etcd-rev,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:39.711219402 +0000 UTC m=+6.000819345,LastTimestamp:2026-03-20 13:22:39.711219402 +0000 UTC m=+6.000819345,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.283469 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-etcd\"" event="&Event{ObjectMeta:{etcd-crc.189e8f63433a60c6 openshift-etcd 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-etcd,Name:etcd-crc,UID:2139d3e2895fc6797b9c76a1b4c9886d,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{etcd-rev},},Reason:Started,Message:Started container etcd-rev,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:39.722987718 +0000 UTC m=+6.012587661,LastTimestamp:2026-03-20 13:22:39.722987718 +0000 UTC m=+6.012587661,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.287509 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 13:22:59 crc kubenswrapper[4690]: &Event{ObjectMeta:{kube-controller-manager-crc.189e8f637588dd7e openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": context deadline exceeded (Client.Timeout exceeded while awaiting headers) Mar 20 13:22:59 crc kubenswrapper[4690]: body: Mar 20 13:22:59 crc kubenswrapper[4690]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:40.566992254 +0000 UTC m=+6.856592227,LastTimestamp:2026-03-20 13:22:40.566992254 +0000 UTC m=+6.856592227,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 13:22:59 crc kubenswrapper[4690]: > Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.291739 4690 event.go:359] "Server rejected event (will not retry!)" 
err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f63758a27da openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:40.567076826 +0000 UTC m=+6.856676809,LastTimestamp:2026-03-20 13:22:40.567076826 +0000 UTC m=+6.856676809,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.296233 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.189e8f62b5e6eaec\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62b5e6eaec openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.351930604 +0000 UTC m=+3.641530557,LastTimestamp:2026-03-20 13:22:48.530886936 +0000 UTC m=+14.820486919,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.299602 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.189e8f62c1bb71e2\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62c1bb71e2 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Created,Message:Created container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.550408162 +0000 UTC m=+3.840008105,LastTimestamp:2026-03-20 13:22:48.762353007 +0000 UTC m=+15.051952990,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.302708 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.189e8f62c2514606\" is forbidden: 
User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f62c2514606 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver-check-endpoints},},Reason:Started,Message:Started container kube-apiserver-check-endpoints,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:37.560227334 +0000 UTC m=+3.849827277,LastTimestamp:2026-03-20 13:22:48.780095269 +0000 UTC m=+15.069695252,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.306389 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event=< Mar 20 13:22:59 crc kubenswrapper[4690]: &Event{ObjectMeta:{kube-apiserver-crc.189e8f655f5a6345 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:ProbeError,Message:Startup probe error: HTTP probe failed with statuscode: 403 Mar 20 13:22:59 crc kubenswrapper[4690]: body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Mar 20 13:22:59 crc kubenswrapper[4690]: Mar 20 13:22:59 crc kubenswrapper[4690]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:48.784782149 +0000 UTC m=+15.074382132,LastTimestamp:2026-03-20 13:22:48.784782149 +0000 UTC m=+15.074382132,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 13:22:59 crc kubenswrapper[4690]: > Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.309414 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f655f5b407c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Unhealthy,Message:Startup probe failed: HTTP probe failed with statuscode: 403,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:48.78483878 +0000 UTC m=+15.074438763,LastTimestamp:2026-03-20 13:22:48.78483878 +0000 UTC m=+15.074438763,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.312598 4690 event.go:359] "Server rejected event (will not retry!)" err="events 
\"kube-apiserver-crc.189e8f655f5a6345\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event=< Mar 20 13:22:59 crc kubenswrapper[4690]: &Event{ObjectMeta:{kube-apiserver-crc.189e8f655f5a6345 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:ProbeError,Message:Startup probe error: HTTP probe failed with statuscode: 403 Mar 20 13:22:59 crc kubenswrapper[4690]: body: {"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Mar 20 13:22:59 crc kubenswrapper[4690]: Mar 20 13:22:59 crc kubenswrapper[4690]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:48.784782149 +0000 UTC m=+15.074382132,LastTimestamp:2026-03-20 13:22:48.796508543 +0000 UTC m=+15.086108496,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 13:22:59 crc kubenswrapper[4690]: > Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.316234 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-apiserver-crc.189e8f655f5b407c\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-apiserver\"" event="&Event{ObjectMeta:{kube-apiserver-crc.189e8f655f5b407c openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-crc,UID:f4b27818a5e8e43d0dc095d08835c792,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{kube-apiserver},},Reason:Unhealthy,Message:Startup probe failed: HTTP probe failed with statuscode: 403,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:48.78483878 +0000 UTC m=+15.074438763,LastTimestamp:2026-03-20 13:22:48.796555835 +0000 UTC m=+15.086155788,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.321004 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 13:22:59 crc kubenswrapper[4690]: &Event{ObjectMeta:{kube-controller-manager-crc.189e8f65c99f9fca openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Mar 20 13:22:59 crc kubenswrapper[4690]: body: Mar 20 13:22:59 crc kubenswrapper[4690]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:50.567704522 +0000 UTC 
m=+16.857304465,LastTimestamp:2026-03-20 13:22:50.567704522 +0000 UTC m=+16.857304465,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 13:22:59 crc kubenswrapper[4690]: > Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.324623 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f65c9a0430a openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:50.567746314 +0000 UTC m=+16.857346257,LastTimestamp:2026-03-20 13:22:50.567746314 +0000 UTC m=+16.857346257,Count:1,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:22:59 crc kubenswrapper[4690]: I0320 13:22:59.354438 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:22:59 crc kubenswrapper[4690]: W0320 13:22:59.876775 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: runtimeclasses.node.k8s.io is forbidden: User "system:anonymous" cannot list resource "runtimeclasses" in API group "node.k8s.io" at the cluster scope Mar 20 13:22:59 crc kubenswrapper[4690]: E0320 13:22:59.876933 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: runtimeclasses.node.k8s.io is forbidden: User \"system:anonymous\" cannot list resource \"runtimeclasses\" in API group \"node.k8s.io\" at the cluster scope" logger="UnhandledError" Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.353005 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.567283 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.567371 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get 
\"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.567419 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.567549 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.569041 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.569108 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.569126 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.569927 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="cluster-policy-controller" containerStatusID={"Type":"cri-o","ID":"8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container cluster-policy-controller failed startup probe, will be restarted" Mar 20 13:23:00 crc kubenswrapper[4690]: I0320 13:23:00.570243 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" containerID="cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1" gracePeriod=30 Mar 20 13:23:00 crc kubenswrapper[4690]: E0320 13:23:00.574630 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e8f65c99f9fca\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 13:23:00 crc kubenswrapper[4690]: &Event{ObjectMeta:{kube-controller-manager-crc.189e8f65c99f9fca openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Mar 20 13:23:00 crc kubenswrapper[4690]: body: Mar 20 13:23:00 crc kubenswrapper[4690]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:50.567704522 +0000 UTC m=+16.857304465,LastTimestamp:2026-03-20 13:23:00.56735117 +0000 UTC m=+26.856951113,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 13:23:00 crc kubenswrapper[4690]: > Mar 20 13:23:00 crc kubenswrapper[4690]: E0320 13:23:00.582044 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e8f65c9a0430a\" is forbidden: User \"system:anonymous\" cannot patch resource 
\"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f65c9a0430a openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:50.567746314 +0000 UTC m=+16.857346257,LastTimestamp:2026-03-20 13:23:00.567391741 +0000 UTC m=+26.856991684,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:23:00 crc kubenswrapper[4690]: W0320 13:23:00.582141 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:anonymous" cannot list resource "services" in API group "" at the cluster scope Mar 20 13:23:00 crc kubenswrapper[4690]: E0320 13:23:00.582476 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:anonymous\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError" Mar 20 13:23:00 crc kubenswrapper[4690]: E0320 13:23:00.589199 4690 event.go:359] "Server rejected event (will not retry!)" err="events is forbidden: User \"system:anonymous\" cannot create resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f681dd1cc53 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Killing,Message:Container cluster-policy-controller failed startup probe, will be restarted,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:23:00.570213459 +0000 UTC m=+26.859813462,LastTimestamp:2026-03-20 13:23:00.570213459 +0000 UTC m=+26.859813462,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:23:00 crc kubenswrapper[4690]: E0320 13:23:00.699122 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e8f6253861b35\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f6253861b35 openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Pulled,Message:Container image 
\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:35.701418805 +0000 UTC m=+1.991018768,LastTimestamp:2026-03-20 13:23:00.692769534 +0000 UTC m=+26.982369487,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:23:00 crc kubenswrapper[4690]: E0320 13:23:00.915553 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e8f6266dd61da\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f6266dd61da openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Created,Message:Created container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.025905626 +0000 UTC m=+2.315505619,LastTimestamp:2026-03-20 13:23:00.907713787 +0000 UTC m=+27.197313820,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:23:00 crc kubenswrapper[4690]: E0320 13:23:00.929739 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e8f626799cd8f\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f626799cd8f openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Started,Message:Started container cluster-policy-controller,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:36.038253967 +0000 UTC m=+2.327853950,LastTimestamp:2026-03-20 13:23:00.922028924 +0000 UTC m=+27.211628897,Count:2,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.357811 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.585271 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.586208 4690 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1" exitCode=255 
Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.586307 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1"} Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.586370 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577"} Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.586527 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.588114 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.588173 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:01 crc kubenswrapper[4690]: I0320 13:23:01.588192 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.182451 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.183972 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.184004 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.184016 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.184039 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:23:02 crc kubenswrapper[4690]: E0320 13:23:02.190997 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 13:23:02 crc kubenswrapper[4690]: E0320 13:23:02.211026 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.354809 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.870355 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.870957 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.872778 4690 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.872834 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:02 crc kubenswrapper[4690]: I0320 13:23:02.872847 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:03 crc kubenswrapper[4690]: I0320 13:23:03.354948 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:04 crc kubenswrapper[4690]: W0320 13:23:04.009882 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:anonymous" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:04 crc kubenswrapper[4690]: E0320 13:23:04.009968 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:anonymous\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" Mar 20 13:23:04 crc kubenswrapper[4690]: I0320 13:23:04.360177 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:04 crc kubenswrapper[4690]: E0320 13:23:04.565051 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 13:23:05 crc kubenswrapper[4690]: W0320 13:23:05.184366 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes "crc" is forbidden: User "system:anonymous" cannot list resource "nodes" in API group "" at the cluster scope Mar 20 13:23:05 crc kubenswrapper[4690]: E0320 13:23:05.184738 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes \"crc\" is forbidden: User \"system:anonymous\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError" Mar 20 13:23:05 crc kubenswrapper[4690]: I0320 13:23:05.352835 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:06 crc kubenswrapper[4690]: I0320 13:23:06.357467 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:07 crc kubenswrapper[4690]: I0320 13:23:07.358187 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:07 crc kubenswrapper[4690]: I0320 13:23:07.566730 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:23:07 crc kubenswrapper[4690]: I0320 13:23:07.567101 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:07 crc kubenswrapper[4690]: I0320 13:23:07.569053 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:07 crc kubenswrapper[4690]: I0320 13:23:07.569227 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:07 crc kubenswrapper[4690]: I0320 13:23:07.569242 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:08 crc kubenswrapper[4690]: I0320 13:23:08.356217 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.191790 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.193439 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.193665 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.193929 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.194220 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:23:09 crc kubenswrapper[4690]: E0320 13:23:09.201692 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 13:23:09 crc kubenswrapper[4690]: E0320 13:23:09.218782 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.357199 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.414343 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.416040 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.416162 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.416187 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:09 crc kubenswrapper[4690]: I0320 13:23:09.417230 4690 
scope.go:117] "RemoveContainer" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.353420 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.566725 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.566797 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Mar 20 13:23:10 crc kubenswrapper[4690]: E0320 13:23:10.570821 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e8f65c99f9fca\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event=< Mar 20 13:23:10 crc kubenswrapper[4690]: &Event{ObjectMeta:{kube-controller-manager-crc.189e8f65c99f9fca openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:ProbeError,Message:Startup probe error: Get "https://192.168.126.11:10357/healthz": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers) Mar 20 13:23:10 crc kubenswrapper[4690]: body: Mar 20 13:23:10 crc kubenswrapper[4690]: ,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:50.567704522 +0000 UTC m=+16.857304465,LastTimestamp:2026-03-20 13:23:10.566774671 +0000 UTC m=+36.856374614,Count:3,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,} Mar 20 13:23:10 crc kubenswrapper[4690]: > Mar 20 13:23:10 crc kubenswrapper[4690]: E0320 13:23:10.574621 4690 event.go:359] "Server rejected event (will not retry!)" err="events \"kube-controller-manager-crc.189e8f65c9a0430a\" is forbidden: User \"system:anonymous\" cannot patch resource \"events\" in API group \"\" in the namespace \"openshift-kube-controller-manager\"" event="&Event{ObjectMeta:{kube-controller-manager-crc.189e8f65c9a0430a openshift-kube-controller-manager 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-controller-manager,Name:kube-controller-manager-crc,UID:f614b9022728cf315e60c057852e563e,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{cluster-policy-controller},},Reason:Unhealthy,Message:Startup probe failed: Get \"https://192.168.126.11:10357/healthz\": net/http: 
request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers),Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:22:50.567746314 +0000 UTC m=+16.857346257,LastTimestamp:2026-03-20 13:23:10.566819042 +0000 UTC m=+36.856418985,Count:3,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.611510 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.612214 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.613982 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e85182cdee7fa17e7e5738a9b6e7618d6f31f536fe8fc32db6df5eabd12dd375" exitCode=255 Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.614026 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"e85182cdee7fa17e7e5738a9b6e7618d6f31f536fe8fc32db6df5eabd12dd375"} Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.614064 4690 scope.go:117] "RemoveContainer" containerID="74b37bb751599f940ca563a8b82065228046df083737c86d91c935d1c483a4ef" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.614173 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.615423 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.615454 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.615463 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:10 crc kubenswrapper[4690]: I0320 13:23:10.615994 4690 scope.go:117] "RemoveContainer" containerID="e85182cdee7fa17e7e5738a9b6e7618d6f31f536fe8fc32db6df5eabd12dd375" Mar 20 13:23:10 crc kubenswrapper[4690]: E0320 13:23:10.616176 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:23:11 crc kubenswrapper[4690]: I0320 13:23:11.352357 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:11 crc kubenswrapper[4690]: I0320 13:23:11.619100 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Mar 20 13:23:12 crc 
kubenswrapper[4690]: I0320 13:23:12.356560 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:12 crc kubenswrapper[4690]: I0320 13:23:12.954651 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:23:12 crc kubenswrapper[4690]: I0320 13:23:12.954931 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:12 crc kubenswrapper[4690]: I0320 13:23:12.956543 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:12 crc kubenswrapper[4690]: I0320 13:23:12.956672 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:12 crc kubenswrapper[4690]: I0320 13:23:12.956906 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:12 crc kubenswrapper[4690]: I0320 13:23:12.957707 4690 scope.go:117] "RemoveContainer" containerID="e85182cdee7fa17e7e5738a9b6e7618d6f31f536fe8fc32db6df5eabd12dd375" Mar 20 13:23:12 crc kubenswrapper[4690]: E0320 13:23:12.958032 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:23:13 crc kubenswrapper[4690]: I0320 13:23:13.354805 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:13 crc kubenswrapper[4690]: W0320 13:23:13.821095 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: services is forbidden: User "system:anonymous" cannot list resource "services" in API group "" at the cluster scope Mar 20 13:23:13 crc kubenswrapper[4690]: E0320 13:23:13.821199 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: services is forbidden: User \"system:anonymous\" cannot list resource \"services\" in API group \"\" at the cluster scope" logger="UnhandledError" Mar 20 13:23:14 crc kubenswrapper[4690]: I0320 13:23:14.352915 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:14 crc kubenswrapper[4690]: E0320 13:23:14.565597 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 13:23:15 crc kubenswrapper[4690]: I0320 13:23:15.357648 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:16 crc 
kubenswrapper[4690]: I0320 13:23:16.202821 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:16 crc kubenswrapper[4690]: I0320 13:23:16.204483 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:16 crc kubenswrapper[4690]: I0320 13:23:16.204548 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:16 crc kubenswrapper[4690]: I0320 13:23:16.204573 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:16 crc kubenswrapper[4690]: I0320 13:23:16.204614 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:23:16 crc kubenswrapper[4690]: E0320 13:23:16.211812 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 13:23:16 crc kubenswrapper[4690]: E0320 13:23:16.225264 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 13:23:16 crc kubenswrapper[4690]: I0320 13:23:16.357288 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:17 crc kubenswrapper[4690]: I0320 13:23:17.357006 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:18 crc kubenswrapper[4690]: W0320 13:23:18.038258 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: runtimeclasses.node.k8s.io is forbidden: User "system:anonymous" cannot list resource "runtimeclasses" in API group "node.k8s.io" at the cluster scope Mar 20 13:23:18 crc kubenswrapper[4690]: E0320 13:23:18.038329 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: runtimeclasses.node.k8s.io is forbidden: User \"system:anonymous\" cannot list resource \"runtimeclasses\" in API group \"node.k8s.io\" at the cluster scope" logger="UnhandledError" Mar 20 13:23:18 crc kubenswrapper[4690]: I0320 13:23:18.361501 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:18 crc kubenswrapper[4690]: I0320 13:23:18.591125 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:23:18 crc kubenswrapper[4690]: I0320 13:23:18.591372 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:18 crc kubenswrapper[4690]: I0320 13:23:18.593336 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 
20 13:23:18 crc kubenswrapper[4690]: I0320 13:23:18.593509 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:18 crc kubenswrapper[4690]: I0320 13:23:18.593636 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:18 crc kubenswrapper[4690]: I0320 13:23:18.595060 4690 scope.go:117] "RemoveContainer" containerID="e85182cdee7fa17e7e5738a9b6e7618d6f31f536fe8fc32db6df5eabd12dd375" Mar 20 13:23:18 crc kubenswrapper[4690]: E0320 13:23:18.595789 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.357155 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.456684 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.456988 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.458575 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.458614 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.458631 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.463617 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.644447 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.645372 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.645439 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:19 crc kubenswrapper[4690]: I0320 13:23:19.645464 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:19 crc kubenswrapper[4690]: W0320 13:23:19.913652 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: nodes "crc" is forbidden: User "system:anonymous" cannot list resource "nodes" in API group "" at the cluster scope Mar 20 13:23:19 crc kubenswrapper[4690]: E0320 13:23:19.913716 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: nodes \"crc\" is 
forbidden: User \"system:anonymous\" cannot list resource \"nodes\" in API group \"\" at the cluster scope" logger="UnhandledError" Mar 20 13:23:20 crc kubenswrapper[4690]: I0320 13:23:20.354328 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:21 crc kubenswrapper[4690]: I0320 13:23:21.355081 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:22 crc kubenswrapper[4690]: I0320 13:23:22.354498 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:22 crc kubenswrapper[4690]: W0320 13:23:22.986806 4690 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User "system:anonymous" cannot list resource "csidrivers" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:22 crc kubenswrapper[4690]: E0320 13:23:22.986895 4690 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: csidrivers.storage.k8s.io is forbidden: User \"system:anonymous\" cannot list resource \"csidrivers\" in API group \"storage.k8s.io\" at the cluster scope" logger="UnhandledError" Mar 20 13:23:23 crc kubenswrapper[4690]: I0320 13:23:23.212957 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:23 crc kubenswrapper[4690]: I0320 13:23:23.215427 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:23 crc kubenswrapper[4690]: I0320 13:23:23.215464 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:23 crc kubenswrapper[4690]: I0320 13:23:23.215475 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:23 crc kubenswrapper[4690]: I0320 13:23:23.215505 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:23:23 crc kubenswrapper[4690]: E0320 13:23:23.222189 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 13:23:23 crc kubenswrapper[4690]: E0320 13:23:23.231016 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 13:23:23 crc kubenswrapper[4690]: I0320 13:23:23.350361 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:24 crc kubenswrapper[4690]: I0320 13:23:24.354476 4690 csi_plugin.go:884] 
Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:24 crc kubenswrapper[4690]: E0320 13:23:24.566291 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 13:23:25 crc kubenswrapper[4690]: I0320 13:23:25.353018 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:26 crc kubenswrapper[4690]: I0320 13:23:26.356295 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:27 crc kubenswrapper[4690]: I0320 13:23:27.353901 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:28 crc kubenswrapper[4690]: I0320 13:23:28.219416 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Mar 20 13:23:28 crc kubenswrapper[4690]: I0320 13:23:28.220250 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:28 crc kubenswrapper[4690]: I0320 13:23:28.221683 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:28 crc kubenswrapper[4690]: I0320 13:23:28.221739 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:28 crc kubenswrapper[4690]: I0320 13:23:28.221758 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:28 crc kubenswrapper[4690]: I0320 13:23:28.356886 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:29 crc kubenswrapper[4690]: I0320 13:23:29.353094 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:30 crc kubenswrapper[4690]: I0320 13:23:30.222712 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:30 crc kubenswrapper[4690]: I0320 13:23:30.224213 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:30 crc kubenswrapper[4690]: I0320 13:23:30.224263 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:30 crc kubenswrapper[4690]: I0320 13:23:30.224276 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:30 crc kubenswrapper[4690]: I0320 13:23:30.224304 4690 
kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:23:30 crc kubenswrapper[4690]: E0320 13:23:30.230282 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 13:23:30 crc kubenswrapper[4690]: E0320 13:23:30.237393 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 13:23:30 crc kubenswrapper[4690]: I0320 13:23:30.355190 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.355460 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.413648 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.415087 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.415158 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.415206 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.416238 4690 scope.go:117] "RemoveContainer" containerID="e85182cdee7fa17e7e5738a9b6e7618d6f31f536fe8fc32db6df5eabd12dd375" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.686667 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.690809 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9"} Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.690970 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.692439 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.692501 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:31 crc kubenswrapper[4690]: I0320 13:23:31.692521 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.354440 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io 
"crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.695263 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.696253 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/2.log" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.698699 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9" exitCode=255 Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.698735 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9"} Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.698770 4690 scope.go:117] "RemoveContainer" containerID="e85182cdee7fa17e7e5738a9b6e7618d6f31f536fe8fc32db6df5eabd12dd375" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.698955 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.700487 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.700512 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.700522 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.701046 4690 scope.go:117] "RemoveContainer" containerID="b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9" Mar 20 13:23:32 crc kubenswrapper[4690]: E0320 13:23:32.701229 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:23:32 crc kubenswrapper[4690]: I0320 13:23:32.954023 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:23:33 crc kubenswrapper[4690]: I0320 13:23:33.357438 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:33 crc kubenswrapper[4690]: I0320 13:23:33.702709 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Mar 20 13:23:33 crc kubenswrapper[4690]: I0320 13:23:33.704749 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Mar 20 13:23:33 crc kubenswrapper[4690]: I0320 13:23:33.705515 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:33 crc kubenswrapper[4690]: I0320 13:23:33.705544 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:33 crc kubenswrapper[4690]: I0320 13:23:33.705552 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:33 crc kubenswrapper[4690]: I0320 13:23:33.705978 4690 scope.go:117] "RemoveContainer" containerID="b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9" Mar 20 13:23:33 crc kubenswrapper[4690]: E0320 13:23:33.706115 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:23:34 crc kubenswrapper[4690]: I0320 13:23:34.356540 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:34 crc kubenswrapper[4690]: E0320 13:23:34.566680 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 13:23:35 crc kubenswrapper[4690]: I0320 13:23:35.356223 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:36 crc kubenswrapper[4690]: I0320 13:23:36.354209 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:37 crc kubenswrapper[4690]: I0320 13:23:37.230695 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:37 crc kubenswrapper[4690]: I0320 13:23:37.232261 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:37 crc kubenswrapper[4690]: I0320 13:23:37.232304 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:37 crc kubenswrapper[4690]: I0320 13:23:37.232320 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:37 crc kubenswrapper[4690]: I0320 13:23:37.232348 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:23:37 crc kubenswrapper[4690]: E0320 13:23:37.240668 4690 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes is forbidden: User \"system:anonymous\" cannot create resource \"nodes\" in API group \"\" at the cluster scope" node="crc" Mar 20 13:23:37 crc kubenswrapper[4690]: E0320 13:23:37.240717 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="leases.coordination.k8s.io \"crc\" is 
forbidden: User \"system:anonymous\" cannot get resource \"leases\" in API group \"coordination.k8s.io\" in the namespace \"kube-node-lease\"" interval="7s" Mar 20 13:23:37 crc kubenswrapper[4690]: I0320 13:23:37.356966 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.353734 4690 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: csinodes.storage.k8s.io "crc" is forbidden: User "system:anonymous" cannot get resource "csinodes" in API group "storage.k8s.io" at the cluster scope Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.591574 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.591765 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.592735 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.592772 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.592785 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.593333 4690 scope.go:117] "RemoveContainer" containerID="b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9" Mar 20 13:23:38 crc kubenswrapper[4690]: E0320 13:23:38.593521 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.896236 4690 csr.go:261] certificate signing request csr-nxc8l is approved, waiting to be issued Mar 20 13:23:38 crc kubenswrapper[4690]: I0320 13:23:38.905142 4690 csr.go:257] certificate signing request csr-nxc8l is issued Mar 20 13:23:39 crc kubenswrapper[4690]: I0320 13:23:39.000550 4690 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Mar 20 13:23:39 crc kubenswrapper[4690]: I0320 13:23:39.184022 4690 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Mar 20 13:23:39 crc kubenswrapper[4690]: I0320 13:23:39.413826 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:39 crc kubenswrapper[4690]: I0320 13:23:39.416315 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:39 crc kubenswrapper[4690]: I0320 13:23:39.416386 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:39 crc kubenswrapper[4690]: I0320 13:23:39.416408 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Mar 20 13:23:39 crc kubenswrapper[4690]: I0320 13:23:39.906888 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2026-11-14 05:58:58.088941522 +0000 UTC Mar 20 13:23:39 crc kubenswrapper[4690]: I0320 13:23:39.906953 4690 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 5728h35m18.181993294s for next certificate rotation Mar 20 13:23:42 crc kubenswrapper[4690]: I0320 13:23:42.812582 4690 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.241327 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.243049 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.243145 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.243175 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.243319 4690 kubelet_node_status.go:76] "Attempting to register node" node="crc" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.254434 4690 kubelet_node_status.go:115] "Node was previously registered" node="crc" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.254797 4690 kubelet_node_status.go:79] "Successfully registered node" node="crc" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.254826 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": node \"crc\" not found" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.259439 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.259475 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.259487 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.259505 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.259517 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:23:44Z","lastTransitionTime":"2026-03-20T13:23:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.276211 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.281262 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.281286 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.281296 4690 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.281310 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.281321 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:23:44Z","lastTransitionTime":"2026-03-20T13:23:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.290343 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.293503 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.293536 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.293548 4690 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.293562 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.293572 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:23:44Z","lastTransitionTime":"2026-03-20T13:23:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.303150 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.305907 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.305945 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.305958 4690 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.305973 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:23:44 crc kubenswrapper[4690]: I0320 13:23:44.305984 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:23:44Z","lastTransitionTime":"2026-03-20T13:23:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.318090 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.318245 4690 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.318276 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.418783 4690 
kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.519517 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.567538 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.620631 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.721435 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.822278 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:44 crc kubenswrapper[4690]: E0320 13:23:44.922593 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.023520 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.124380 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.225035 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.325397 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.425788 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.526612 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.627246 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.728560 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.829156 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:45 crc kubenswrapper[4690]: E0320 13:23:45.930276 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.031440 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.132582 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.233398 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.333585 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:46 crc 
kubenswrapper[4690]: E0320 13:23:46.434084 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.534558 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.635241 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.736529 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.836894 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:46 crc kubenswrapper[4690]: E0320 13:23:46.937792 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.038678 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.138840 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.239689 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.340369 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.441409 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.542320 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.643345 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.744503 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.844910 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:47 crc kubenswrapper[4690]: E0320 13:23:47.945945 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.046127 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.147127 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.247816 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.348355 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.449040 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.549171 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.649305 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.749498 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.850059 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:48 crc kubenswrapper[4690]: E0320 13:23:48.950543 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.051613 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.152330 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.253413 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.354086 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.455235 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.556415 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.657417 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.757775 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.858379 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:49 crc kubenswrapper[4690]: E0320 13:23:49.958812 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.060127 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.160954 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.261417 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.362120 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.462819 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.563616 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.664507 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.765483 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.866394 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:50 crc kubenswrapper[4690]: E0320 13:23:50.967499 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.067899 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.169098 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.269843 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.370654 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: I0320 13:23:51.414319 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Mar 20 13:23:51 crc kubenswrapper[4690]: I0320 13:23:51.415914 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Mar 20 13:23:51 crc kubenswrapper[4690]: I0320 13:23:51.415964 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Mar 20 13:23:51 crc kubenswrapper[4690]: I0320 13:23:51.415980 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Mar 20 13:23:51 crc kubenswrapper[4690]: I0320 13:23:51.417105 4690 scope.go:117] "RemoveContainer" containerID="b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.417395 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.471676 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.572244 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.672985 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.773469 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.873769 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:51 crc kubenswrapper[4690]: E0320 13:23:51.974298 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.075412 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.176201 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.276585 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.377357 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.478047 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.579461 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.679932 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.780991 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.881412 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:52 crc kubenswrapper[4690]: E0320 13:23:52.982562 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.083170 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.183752 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: I0320 13:23:53.274725 4690 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.283886 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.384631 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.485840 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.586272 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.686703 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.787895 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.888831 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:53 crc kubenswrapper[4690]: E0320 13:23:53.989180 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.090410 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.191476 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.292416 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.392936 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.493406 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.567702 4690 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.593787 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.594982 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": node \"crc\" not found"
Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.599281 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.599349 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.599363 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.599385 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.599401 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:23:54Z","lastTransitionTime":"2026-03-20T13:23:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.613013 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.617120 4690 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.617238 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.617323 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.617362 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.617413 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:23:54Z","lastTransitionTime":"2026-03-20T13:23:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.628674 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.633382 4690 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.633423 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.633437 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.633466 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.633486 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:23:54Z","lastTransitionTime":"2026-03-20T13:23:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.645539 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.650809 4690 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.650889 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.650906 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.650932 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:23:54 crc kubenswrapper[4690]: I0320 13:23:54.650960 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:23:54Z","lastTransitionTime":"2026-03-20T13:23:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.670451 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.670691 4690 
kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.694234 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.794990 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.896115 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:54 crc kubenswrapper[4690]: E0320 13:23:54.996450 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.097831 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.197997 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.298974 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.399773 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.500703 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.601019 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.702127 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.802756 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:55 crc kubenswrapper[4690]: E0320 13:23:55.903544 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.004234 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.104753 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.205936 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.306687 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.407466 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.508012 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.608634 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 
13:23:56.709879 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.810587 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:56 crc kubenswrapper[4690]: E0320 13:23:56.911867 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.012090 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.113198 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.214046 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.314250 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.414471 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.514934 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.615805 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.716603 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.817805 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:57 crc kubenswrapper[4690]: E0320 13:23:57.918037 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.019217 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.119646 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.219783 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.319950 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.420699 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.520933 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.621572 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.722177 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc 
kubenswrapper[4690]: E0320 13:23:58.823204 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:58 crc kubenswrapper[4690]: E0320 13:23:58.923821 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.024770 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.124943 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.225434 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.326408 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.426627 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.527111 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.628128 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.728318 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.829361 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:23:59 crc kubenswrapper[4690]: E0320 13:23:59.930537 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.031532 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.131645 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.231954 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.332214 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.432350 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.533197 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.633791 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.734394 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.835314 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 
20 13:24:00 crc kubenswrapper[4690]: E0320 13:24:00.936196 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.036555 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.137144 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.237990 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.339189 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.440340 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.540685 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.641233 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.742084 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.842496 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:01 crc kubenswrapper[4690]: E0320 13:24:01.942922 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.043414 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.143835 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.244944 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.345434 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.414164 4690 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.415742 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.415802 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.415815 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.416475 4690 scope.go:117] "RemoveContainer" containerID="b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.416665 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to 
\"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 40s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.446125 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.546552 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.647290 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: E0320 13:24:02.748292 4690 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.822090 4690 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.851247 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.851279 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.851288 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.851300 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.851311 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:02Z","lastTransitionTime":"2026-03-20T13:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.876225 4690 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.955067 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.955120 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.955135 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.955154 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:02 crc kubenswrapper[4690]: I0320 13:24:02.955167 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:02Z","lastTransitionTime":"2026-03-20T13:24:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.058070 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.058163 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.058189 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.058222 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.058249 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.161286 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.161358 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.161372 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.161395 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.161415 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.264357 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.264407 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.264419 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.264439 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.264451 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.366564 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.366603 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.366614 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.366629 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.366641 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.381170 4690 apiserver.go:52] "Watching apiserver" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.389761 4690 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.390152 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-machine-config-operator/machine-config-daemon-ftcqx","openshift-multus/multus-additional-cni-plugins-t4t2r","openshift-multus/network-metrics-daemon-rpcmp","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-operator/iptables-alerter-4ln5h","openshift-ovn-kubernetes/ovnkube-node-x2b7f","openshift-dns/node-resolver-hq77p","openshift-image-registry/node-ca-lgtw8","openshift-multus/multus-pgtf4","openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x","openshift-network-node-identity/network-node-identity-vrzqb"] Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.390501 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.390552 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.390661 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.390787 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.390827 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.391024 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.391348 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.391406 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.391459 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.391646 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.391718 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.391757 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.391848 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.392526 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-hq77p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.392568 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.392626 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.393110 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.393301 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.394174 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.394583 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.396191 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.396450 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.397020 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.397489 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.398409 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.398495 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.398720 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.400278 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.400494 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.400586 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.401248 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.401308 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.402120 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.402143 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.402465 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.402592 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.402978 4690 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"openshift-service-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.403025 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404164 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404652 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404787 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404849 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404992 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404994 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404882 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.405062 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404904 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.405366 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.405700 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.405000 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.404840 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.406361 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.406650 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.407136 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.413611 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.433686 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.448767 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.453008 4690 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.455757 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.455938 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.456337 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.456430 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.456535 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.456681 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.456805 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.456925 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457064 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457181 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457286 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457392 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457534 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457681 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457810 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457975 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: 
\"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.458148 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.458291 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.456543 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.456860 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457120 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457165 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457477 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457679 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.457779 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.458087 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.458264 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.458478 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.458711 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.458910 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459001 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). 
InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459032 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.458497 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459279 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459462 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459592 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459698 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459834 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460062 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459309 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459355 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460185 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459512 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.459904 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460107 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460310 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460362 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460392 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460257 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460424 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460538 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460606 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460705 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460708 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460770 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460852 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460909 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460943 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460973 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461003 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461033 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461060 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461088 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: 
\"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461118 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461145 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461174 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461201 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461224 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461248 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461276 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461302 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.460836 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461065 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461108 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461251 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461330 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461495 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461531 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461548 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461561 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461588 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461598 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461620 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461674 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461701 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461727 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461757 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461786 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461843 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461892 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.461928 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462025 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462035 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462139 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462135 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462198 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462364 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462169 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462810 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462818 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462635 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462841 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462898 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462915 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462951 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462957 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462976 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463003 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463029 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463052 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463029 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463081 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463115 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463145 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463173 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463199 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463220 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463238 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463256 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463273 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463290 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: 
\"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463299 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463284 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463312 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463389 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463433 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.463907 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464050 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464263 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464334 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464377 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464415 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464443 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464469 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464496 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464523 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464550 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464575 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: 
\"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464604 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464628 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464651 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464676 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464702 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464732 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464759 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464786 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464814 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464842 4690 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464892 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466972 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467068 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467093 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467121 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467146 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467169 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467189 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467208 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 
13:24:03.467225 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467242 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467261 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467283 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467301 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467324 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467343 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467360 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467380 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467396 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 
13:24:03.467414 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467432 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467450 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467525 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467544 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467560 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467836 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467874 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467892 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467909 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467928 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468034 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468059 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468077 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468095 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468118 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468140 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468158 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468192 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464334 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod 
"5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.462824 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464421 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464480 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.464885 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.465214 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.465238 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.465330 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.465518 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.465584 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.465662 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.465786 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466051 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466186 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466271 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470117 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466289 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466562 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466584 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470145 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466593 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466646 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466875 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467076 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467314 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467419 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467450 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467660 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.467729 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468019 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468066 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468193 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468272 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468331 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468365 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468491 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.468949 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.469142 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.469236 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470475 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470507 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.469413 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.469535 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.469601 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.469596 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.469688 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.469967 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470048 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.466588 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470487 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470685 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470913 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.470970 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471029 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471164 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471184 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471375 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471403 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471408 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471448 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471468 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471477 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471494 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471503 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471568 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471652 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471726 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.471749 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.471901 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:24:03.971878388 +0000 UTC m=+90.261478551 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.472057 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.472123 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.472333 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.472348 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.472370 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.472624 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.472944 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.473125 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.473819 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.473968 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474277 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474341 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474354 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474107 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474733 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474840 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474891 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474912 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474939 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474965 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.474985 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475004 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475024 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475018 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475044 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475065 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475084 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475091 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475105 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475126 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475144 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475177 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475201 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475221 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475238 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475257 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475275 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475292 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475310 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475328 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475347 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475364 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475383 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475400 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475421 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475440 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475461 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475481 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475501 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475521 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475540 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475561 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475558 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475580 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475603 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475620 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475640 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475658 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475676 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475694 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475719 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475737 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475754 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475774 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475794 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475814 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475832 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475856 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475888 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475906 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475926 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.475979 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-netns\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476015 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476041 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gh6s\" (UniqueName: \"kubernetes.io/projected/d459decc-f715-4636-bc35-963ae8133ec7-kube-api-access-5gh6s\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476060 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4l4jb\" (UniqueName: \"kubernetes.io/projected/fba1e50b-81f4-438f-b056-3f8cbee7fad1-kube-api-access-4l4jb\") pod \"node-resolver-hq77p\" (UID: \"fba1e50b-81f4-438f-b056-3f8cbee7fad1\") " pod="openshift-dns/node-resolver-hq77p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476081 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476101 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-tuning-conf-dir\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476121 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-k8s-cni-cncf-io\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476139 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55l66\" (UniqueName: \"kubernetes.io/projected/d83a0d76-2d76-4202-a2f1-42b9ccb66802-kube-api-access-55l66\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476158 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-os-release\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476175 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/606df6ba-3dfe-48de-8890-9a5a0c030d23-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: 
\"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476194 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a72940bb-614b-417f-9e8b-bcfddae31f96-host\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476211 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-socket-dir-parent\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476229 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-hostroot\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476251 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476269 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-ovn\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476325 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/372b2734-220f-4e91-98c1-dbb9d1042273-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476346 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l74sw\" (UniqueName: \"kubernetes.io/projected/372b2734-220f-4e91-98c1-dbb9d1042273-kube-api-access-l74sw\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476364 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-cni-multus\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476381 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-etc-kubernetes\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476399 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476420 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476439 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476459 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476476 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-netd\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476493 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-kubelet\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476512 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-daemon-config\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476532 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60ded650-b298-4115-8286-8969b94d4062-mcd-auth-proxy-config\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 
13:24:03.476551 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60ded650-b298-4115-8286-8969b94d4062-proxy-tls\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476567 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-system-cni-dir\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476584 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-os-release\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476600 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdt8x\" (UniqueName: \"kubernetes.io/projected/a72940bb-614b-417f-9e8b-bcfddae31f96-kube-api-access-jdt8x\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476616 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-slash\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476633 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-etc-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476649 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-log-socket\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476663 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-ovn-kubernetes\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476682 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476699 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-conf-dir\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476696 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476717 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476736 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-netns\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476750 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-systemd\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476770 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476784 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-cni-bin\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476800 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476815 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: 
\"kubernetes.io/configmap/606df6ba-3dfe-48de-8890-9a5a0c030d23-cni-binary-copy\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476830 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovn-node-metrics-cert\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476848 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frghp\" (UniqueName: \"kubernetes.io/projected/60ded650-b298-4115-8286-8969b94d4062-kube-api-access-frghp\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476884 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-system-cni-dir\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476903 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476918 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-node-log\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476935 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-env-overrides\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476954 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-kubelet\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.476981 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 
13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477000 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-cnibin\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477015 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/fba1e50b-81f4-438f-b056-3f8cbee7fad1-hosts-file\") pod \"node-resolver-hq77p\" (UID: \"fba1e50b-81f4-438f-b056-3f8cbee7fad1\") " pod="openshift-dns/node-resolver-hq77p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477030 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7h2w5\" (UniqueName: \"kubernetes.io/projected/606df6ba-3dfe-48de-8890-9a5a0c030d23-kube-api-access-7h2w5\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477047 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7x88j\" (UniqueName: \"kubernetes.io/projected/874238ac-6c4c-40c9-ad22-1bec31020fb6-kube-api-access-7x88j\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477066 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477083 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-multus-certs\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477101 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477117 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a72940bb-614b-417f-9e8b-bcfddae31f96-serviceca\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477135 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477151 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/60ded650-b298-4115-8286-8969b94d4062-rootfs\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477168 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/372b2734-220f-4e91-98c1-dbb9d1042273-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477186 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-bin\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477202 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477218 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-cni-dir\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477236 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-systemd-units\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477250 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-var-lib-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477265 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-script-lib\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc 
kubenswrapper[4690]: I0320 13:24:03.477279 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/372b2734-220f-4e91-98c1-dbb9d1042273-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477296 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d83a0d76-2d76-4202-a2f1-42b9ccb66802-cni-binary-copy\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477311 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-cnibin\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.477325 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-config\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.478060 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.478457 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.478726 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.478773 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.479054 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.479592 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.479634 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.479848 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.480177 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.480618 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.480643 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.480807 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.481355 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:03.981329728 +0000 UTC m=+90.270929671 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.481364 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.481046 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.481787 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.481823 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.481911 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.482400 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.478391 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.480767 4690 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.482968 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483305 4690 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483345 4690 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483363 4690 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483381 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483399 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483416 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483430 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483443 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483457 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483513 4690 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483528 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483568 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483585 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483600 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483653 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483974 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.483994 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484012 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484029 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484043 4690 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484057 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484070 4690 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484084 4690 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484099 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484111 4690 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484124 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484137 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484150 4690 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484163 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484174 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484186 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484200 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484215 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484230 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484244 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484258 4690 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: 
\"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484272 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484285 4690 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484298 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484312 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484326 4690 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484338 4690 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484351 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484365 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484378 4690 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484391 4690 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484403 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484415 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484428 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484441 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484454 4690 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484466 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484478 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484493 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484510 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484524 4690 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484537 4690 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484550 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484562 4690 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484574 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484587 4690 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484598 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484614 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484627 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484642 4690 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484657 4690 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484672 4690 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484686 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484701 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484713 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484725 4690 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484738 4690 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484753 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484764 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484777 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: 
\"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484789 4690 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484802 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484813 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484826 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484838 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484856 4690 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484887 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484899 4690 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484914 4690 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484927 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484938 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484954 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484967 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484979 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.484993 4690 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485008 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485020 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485033 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485046 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485060 4690 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485073 4690 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485085 4690 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485097 4690 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485109 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485122 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485134 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485147 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485159 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485171 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485187 4690 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485201 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485231 4690 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485244 4690 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485256 4690 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485268 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485280 4690 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485293 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485305 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485317 4690 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: 
\"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485331 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485343 4690 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485355 4690 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485367 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485379 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485391 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485493 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485508 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485522 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485534 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.485659 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.486518 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.486661 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.487024 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.489119 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.489464 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.489516 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.480894 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.489649 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.491029 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.495000 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.492926 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.487428 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.489795 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.490040 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.490292 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.491327 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.492024 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.495156 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:03.995134145 +0000 UTC m=+90.284734088 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.497088 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.497839 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.499419 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.499627 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.499614 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.500639 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.501309 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.502045 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.502069 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.502083 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.502134 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:04.002117315 +0000 UTC m=+90.291717258 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.504047 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.504067 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.504082 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.504128 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:04.004119244 +0000 UTC m=+90.293719177 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.508063 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: 
connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.510822 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.511138 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.511656 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.511877 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.511902 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.512297 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.512443 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.518479 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.518636 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.519106 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.519110 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.519103 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.519266 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.519471 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.519484 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.519642 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.519905 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.520437 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.520663 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.520834 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.521255 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.521304 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.521385 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.521626 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.521633 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.522581 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.522607 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.522795 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.522841 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.522711 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32f
a41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"lo
g-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip
\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.523705 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.524744 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.525021 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.525011 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.534392 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.536813 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.541540 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.542685 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.544786 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.553628 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.560037 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.566992 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":fals
e,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":
\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.574389 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.574441 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.574454 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.574747 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.574776 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.579753 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.593674 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595675 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-socket-dir-parent\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595709 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-hostroot\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595748 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-ovn\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595770 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/372b2734-220f-4e91-98c1-dbb9d1042273-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595794 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l74sw\" (UniqueName: \"kubernetes.io/projected/372b2734-220f-4e91-98c1-dbb9d1042273-kube-api-access-l74sw\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595811 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-cni-multus\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595828 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-etc-kubernetes\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " 
pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595848 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595903 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-netd\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595924 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-kubelet\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595943 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-daemon-config\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595964 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60ded650-b298-4115-8286-8969b94d4062-mcd-auth-proxy-config\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.595982 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-system-cni-dir\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596001 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-os-release\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596021 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jdt8x\" (UniqueName: \"kubernetes.io/projected/a72940bb-614b-417f-9e8b-bcfddae31f96-kube-api-access-jdt8x\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596038 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-slash\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596059 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-etc-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596077 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-log-socket\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596094 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60ded650-b298-4115-8286-8969b94d4062-proxy-tls\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596091 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-etc-kubernetes\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596113 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-conf-dir\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596169 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-conf-dir\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596176 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-netns\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596201 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-netns\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596209 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-systemd\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596228 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596249 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-ovn-kubernetes\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596264 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-socket-dir-parent\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596296 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-cni-bin\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596325 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-hostroot\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596351 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-ovn\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.596357 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: E0320 13:24:03.596407 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs podName:d459decc-f715-4636-bc35-963ae8133ec7 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:04.096389774 +0000 UTC m=+90.385989717 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs") pod "network-metrics-daemon-rpcmp" (UID: "d459decc-f715-4636-bc35-963ae8133ec7") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596566 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-netd\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596598 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-kubelet\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596759 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-etc-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597281 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-daemon-config\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597332 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-log-socket\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597503 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597564 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-systemd\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597637 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-os-release\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597680 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-system-cni-dir\") pod \"multus-pgtf4\" (UID: 
\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597710 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-cni-multus\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.596266 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-var-lib-cni-bin\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597918 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-ovn-kubernetes\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597875 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/60ded650-b298-4115-8286-8969b94d4062-mcd-auth-proxy-config\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.597987 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-slash\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598061 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598121 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598219 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/606df6ba-3dfe-48de-8890-9a5a0c030d23-cni-binary-copy\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598303 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovn-node-metrics-cert\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598378 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frghp\" (UniqueName: \"kubernetes.io/projected/60ded650-b298-4115-8286-8969b94d4062-kube-api-access-frghp\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598446 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-system-cni-dir\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598525 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-node-log\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598616 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-env-overrides\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598732 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-kubelet\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598868 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/606df6ba-3dfe-48de-8890-9a5a0c030d23-cni-binary-copy\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598882 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-system-cni-dir\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598892 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-node-log\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.598935 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-kubelet\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.599156 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-cnibin\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.599307 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-env-overrides\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.599689 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-cnibin\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.599909 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/fba1e50b-81f4-438f-b056-3f8cbee7fad1-hosts-file\") pod \"node-resolver-hq77p\" (UID: \"fba1e50b-81f4-438f-b056-3f8cbee7fad1\") " pod="openshift-dns/node-resolver-hq77p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.600048 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7h2w5\" (UniqueName: \"kubernetes.io/projected/606df6ba-3dfe-48de-8890-9a5a0c030d23-kube-api-access-7h2w5\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.600131 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/fba1e50b-81f4-438f-b056-3f8cbee7fad1-hosts-file\") pod \"node-resolver-hq77p\" (UID: \"fba1e50b-81f4-438f-b056-3f8cbee7fad1\") " pod="openshift-dns/node-resolver-hq77p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.599745 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/372b2734-220f-4e91-98c1-dbb9d1042273-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.600307 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7x88j\" (UniqueName: \"kubernetes.io/projected/874238ac-6c4c-40c9-ad22-1bec31020fb6-kube-api-access-7x88j\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.600430 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-multus-certs\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.600546 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a72940bb-614b-417f-9e8b-bcfddae31f96-serviceca\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.600874 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.600980 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/60ded650-b298-4115-8286-8969b94d4062-rootfs\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601096 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/372b2734-220f-4e91-98c1-dbb9d1042273-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601187 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-cni-dir\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601280 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-systemd-units\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601392 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-var-lib-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601492 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-bin\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601590 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601679 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-multus-cni-dir\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.600461 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-multus-certs\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601594 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-systemd-units\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601633 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-bin\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601657 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-var-lib-openvswitch\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601054 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/60ded650-b298-4115-8286-8969b94d4062-rootfs\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601702 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.601024 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602128 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d83a0d76-2d76-4202-a2f1-42b9ccb66802-cni-binary-copy\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602229 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: 
\"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-cnibin\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602324 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-config\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602431 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-script-lib\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602532 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/372b2734-220f-4e91-98c1-dbb9d1042273-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602629 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-netns\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602755 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gh6s\" (UniqueName: \"kubernetes.io/projected/d459decc-f715-4636-bc35-963ae8133ec7-kube-api-access-5gh6s\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602874 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4l4jb\" (UniqueName: \"kubernetes.io/projected/fba1e50b-81f4-438f-b056-3f8cbee7fad1-kube-api-access-4l4jb\") pod \"node-resolver-hq77p\" (UID: \"fba1e50b-81f4-438f-b056-3f8cbee7fad1\") " pod="openshift-dns/node-resolver-hq77p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602968 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-tuning-conf-dir\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603072 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-k8s-cni-cncf-io\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603190 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55l66\" 
(UniqueName: \"kubernetes.io/projected/d83a0d76-2d76-4202-a2f1-42b9ccb66802-kube-api-access-55l66\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603284 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-os-release\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603377 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/606df6ba-3dfe-48de-8890-9a5a0c030d23-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603470 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a72940bb-614b-417f-9e8b-bcfddae31f96-host\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603577 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-cnibin\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603038 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d83a0d76-2d76-4202-a2f1-42b9ccb66802-cni-binary-copy\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.602883 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/372b2734-220f-4e91-98c1-dbb9d1042273-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603213 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/372b2734-220f-4e91-98c1-dbb9d1042273-env-overrides\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603691 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-script-lib\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.603548 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-config\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.604103 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-netns\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.604266 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a72940bb-614b-417f-9e8b-bcfddae31f96-serviceca\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.604602 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a72940bb-614b-417f-9e8b-bcfddae31f96-host\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.604730 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-os-release\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.604839 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/d83a0d76-2d76-4202-a2f1-42b9ccb66802-host-run-k8s-cni-cncf-io\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605045 4690 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605198 4690 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605281 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605337 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605396 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605449 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605525 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605585 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605645 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605699 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605757 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605808 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605903 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605960 4690 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606027 4690 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606083 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606144 4690 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606198 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606250 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606309 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606366 4690 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606428 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606490 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606547 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606627 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606700 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606767 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606829 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606980 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607041 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607110 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607168 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") 
on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607223 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607285 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607343 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607401 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607459 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607515 4690 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607577 4690 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607632 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607686 4690 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607743 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607803 4690 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607878 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.607970 4690 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on 
node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608031 4690 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608108 4690 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608169 4690 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608227 4690 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608278 4690 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608337 4690 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608393 4690 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608447 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608504 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608557 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608613 4690 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608671 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608727 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Mar 
20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608790 4690 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608849 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608929 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.608989 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.609046 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.609103 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.609162 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.609221 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.609272 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.609328 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.609379 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.605997 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/606df6ba-3dfe-48de-8890-9a5a0c030d23-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606887 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/60ded650-b298-4115-8286-8969b94d4062-proxy-tls\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.606275 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/606df6ba-3dfe-48de-8890-9a5a0c030d23-tuning-conf-dir\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.610822 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.614564 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frghp\" (UniqueName: \"kubernetes.io/projected/60ded650-b298-4115-8286-8969b94d4062-kube-api-access-frghp\") pod \"machine-config-daemon-ftcqx\" (UID: \"60ded650-b298-4115-8286-8969b94d4062\") " pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.616404 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7h2w5\" (UniqueName: \"kubernetes.io/projected/606df6ba-3dfe-48de-8890-9a5a0c030d23-kube-api-access-7h2w5\") pod \"multus-additional-cni-plugins-t4t2r\" (UID: \"606df6ba-3dfe-48de-8890-9a5a0c030d23\") " pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.618356 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l74sw\" (UniqueName: \"kubernetes.io/projected/372b2734-220f-4e91-98c1-dbb9d1042273-kube-api-access-l74sw\") pod \"ovnkube-control-plane-749d76644c-qnv4x\" (UID: \"372b2734-220f-4e91-98c1-dbb9d1042273\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.618856 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55l66\" (UniqueName: \"kubernetes.io/projected/d83a0d76-2d76-4202-a2f1-42b9ccb66802-kube-api-access-55l66\") pod \"multus-pgtf4\" (UID: \"d83a0d76-2d76-4202-a2f1-42b9ccb66802\") " pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.619807 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdt8x\" (UniqueName: \"kubernetes.io/projected/a72940bb-614b-417f-9e8b-bcfddae31f96-kube-api-access-jdt8x\") pod \"node-ca-lgtw8\" (UID: \"a72940bb-614b-417f-9e8b-bcfddae31f96\") " pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.620510 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovn-node-metrics-cert\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.620832 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.624290 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gh6s\" (UniqueName: \"kubernetes.io/projected/d459decc-f715-4636-bc35-963ae8133ec7-kube-api-access-5gh6s\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.624587 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7x88j\" (UniqueName: \"kubernetes.io/projected/874238ac-6c4c-40c9-ad22-1bec31020fb6-kube-api-access-7x88j\") pod \"ovnkube-node-x2b7f\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.624928 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4l4jb\" (UniqueName: \"kubernetes.io/projected/fba1e50b-81f4-438f-b056-3f8cbee7fad1-kube-api-access-4l4jb\") pod 
\"node-resolver-hq77p\" (UID: \"fba1e50b-81f4-438f-b056-3f8cbee7fad1\") " pod="openshift-dns/node-resolver-hq77p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.631447 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.641339 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.682961 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.683253 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.683343 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.683430 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.683520 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.716695 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.723484 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.732055 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.739509 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" Mar 20 13:24:03 crc kubenswrapper[4690]: W0320 13:24:03.744926 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-d3503d5311381266d412eb5a6fc2c93f8f6abf1d4ae5b726416574cc80d84c0d WatchSource:0}: Error finding container d3503d5311381266d412eb5a6fc2c93f8f6abf1d4ae5b726416574cc80d84c0d: Status 404 returned error can't find the container with id d3503d5311381266d412eb5a6fc2c93f8f6abf1d4ae5b726416574cc80d84c0d Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.751746 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-hq77p" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.760901 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-lgtw8" Mar 20 13:24:03 crc kubenswrapper[4690]: W0320 13:24:03.763169 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod606df6ba_3dfe_48de_8890_9a5a0c030d23.slice/crio-aca6bf32e42a45339b065c3d76bc16949a0faeb0e90eb95d51c5235ff8254348 WatchSource:0}: Error finding container aca6bf32e42a45339b065c3d76bc16949a0faeb0e90eb95d51c5235ff8254348: Status 404 returned error can't find the container with id aca6bf32e42a45339b065c3d76bc16949a0faeb0e90eb95d51c5235ff8254348 Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.773949 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-pgtf4" Mar 20 13:24:03 crc kubenswrapper[4690]: W0320 13:24:03.777714 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfba1e50b_81f4_438f_b056_3f8cbee7fad1.slice/crio-62f321349d01ab6254facadc9d588efed6d697335be4eaa5c1eb810bebc21295 WatchSource:0}: Error finding container 62f321349d01ab6254facadc9d588efed6d697335be4eaa5c1eb810bebc21295: Status 404 returned error can't find the container with id 62f321349d01ab6254facadc9d588efed6d697335be4eaa5c1eb810bebc21295 Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.782144 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-hq77p" event={"ID":"fba1e50b-81f4-438f-b056-3f8cbee7fad1","Type":"ContainerStarted","Data":"62f321349d01ab6254facadc9d588efed6d697335be4eaa5c1eb810bebc21295"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.784226 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" event={"ID":"606df6ba-3dfe-48de-8890-9a5a0c030d23","Type":"ContainerStarted","Data":"aca6bf32e42a45339b065c3d76bc16949a0faeb0e90eb95d51c5235ff8254348"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.785279 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.785363 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.785430 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.785490 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.785544 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.785776 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"d3503d5311381266d412eb5a6fc2c93f8f6abf1d4ae5b726416574cc80d84c0d"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.786957 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"5b92692763743bf9150b13b771ae45e50dbd98718dd3816609c5c5ae2137edec"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.787960 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"135c99ee62dfcd97809abf620c31d1daa3c7ad7a184a9604be361433c86274e2"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.788237 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" Mar 20 13:24:03 crc kubenswrapper[4690]: W0320 13:24:03.802063 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda72940bb_614b_417f_9e8b_bcfddae31f96.slice/crio-e882931c6ed22be99fb6b15eb55b2ed4be038667f62f478198f18bfbd5089664 WatchSource:0}: Error finding container e882931c6ed22be99fb6b15eb55b2ed4be038667f62f478198f18bfbd5089664: Status 404 returned error can't find the container with id e882931c6ed22be99fb6b15eb55b2ed4be038667f62f478198f18bfbd5089664 Mar 20 13:24:03 crc kubenswrapper[4690]: W0320 13:24:03.824397 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod372b2734_220f_4e91_98c1_dbb9d1042273.slice/crio-481d06ee036770bc292005b0e6939cb46eb4de4747f870819ee189c32e9d8185 WatchSource:0}: Error finding container 481d06ee036770bc292005b0e6939cb46eb4de4747f870819ee189c32e9d8185: Status 404 returned error can't find the container with id 481d06ee036770bc292005b0e6939cb46eb4de4747f870819ee189c32e9d8185 Mar 20 13:24:03 crc kubenswrapper[4690]: W0320 13:24:03.826405 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd83a0d76_2d76_4202_a2f1_42b9ccb66802.slice/crio-28f6701610e57df04b888ab587c876e5bb677d4642ee07aad87476c646921734 WatchSource:0}: Error finding container 28f6701610e57df04b888ab587c876e5bb677d4642ee07aad87476c646921734: Status 404 returned error can't find the container with id 28f6701610e57df04b888ab587c876e5bb677d4642ee07aad87476c646921734 Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.828560 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.834717 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:03 crc kubenswrapper[4690]: W0320 13:24:03.867469 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60ded650_b298_4115_8286_8969b94d4062.slice/crio-57ca654872c0f75a8f6f9f98553221eb00f34f4ead4f70b8705fcda8c3bfb202 WatchSource:0}: Error finding container 57ca654872c0f75a8f6f9f98553221eb00f34f4ead4f70b8705fcda8c3bfb202: Status 404 returned error can't find the container with id 57ca654872c0f75a8f6f9f98553221eb00f34f4ead4f70b8705fcda8c3bfb202 Mar 20 13:24:03 crc kubenswrapper[4690]: W0320 13:24:03.869005 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod874238ac_6c4c_40c9_ad22_1bec31020fb6.slice/crio-a3bc228fd25f99f1051e24b25de3bd09886a3dabb501f4c7b0ed39f61e1151e4 WatchSource:0}: Error finding container a3bc228fd25f99f1051e24b25de3bd09886a3dabb501f4c7b0ed39f61e1151e4: Status 404 returned error can't find the container with id a3bc228fd25f99f1051e24b25de3bd09886a3dabb501f4c7b0ed39f61e1151e4 Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.895680 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.895722 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.895736 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.895753 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.895765 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.997588 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.997622 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.997633 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.997646 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:03 crc kubenswrapper[4690]: I0320 13:24:03.997655 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:03Z","lastTransitionTime":"2026-03-20T13:24:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.014259 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.014392 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014402 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:24:05.014381677 +0000 UTC m=+91.303981630 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.014447 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.014480 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014541 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014588 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:05.014577881 +0000 UTC m=+91.304177824 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014602 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014635 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014649 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014703 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:05.014682444 +0000 UTC m=+91.304282457 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014762 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014781 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014795 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.014831 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:05.014821567 +0000 UTC m=+91.304421510 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.015473 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.015563 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.015599 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:05.015589046 +0000 UTC m=+91.305188989 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.101162 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.101202 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.101212 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.101229 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.101242 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.116265 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.116505 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.116611 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs podName:d459decc-f715-4636-bc35-963ae8133ec7 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:05.116574449 +0000 UTC m=+91.406174392 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs") pod "network-metrics-daemon-rpcmp" (UID: "d459decc-f715-4636-bc35-963ae8133ec7") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.206547 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.206598 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.206610 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.206627 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.206645 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.309558 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.309586 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.309595 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.309610 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.309621 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.412463 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.412506 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.412514 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.412528 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.412537 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.414341 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.414549 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.419061 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.419850 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.420608 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.421342 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.422023 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.422550 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.423222 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" 
path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.424089 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.424766 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.425342 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.425892 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.426138 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.426605 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.428153 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.429131 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.429814 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.430509 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.431262 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.431793 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.432531 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.433288 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.434158 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.435043 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.435587 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.436593 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.437213 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.438702 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.439886 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.440576 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.441340 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.441915 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.443012 4690 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" 
path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.443021 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState
\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742
fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.443395 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.446248 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.447347 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.448481 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.450636 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.451600 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.452298 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.452970 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.453701 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.454220 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" 
path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.454853 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.455008 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.456703 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.457410 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.458259 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: 
I0320 13:24:04.458930 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.459581 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.460411 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.461616 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.462936 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.463696 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.465096 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.465761 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.467038 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.467922 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.478402 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.496296 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.509722 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.514785 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.514824 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.514836 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.514870 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.514883 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.519297 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.529774 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.538781 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.559337 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.569728 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.582196 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.591811 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.617102 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.617152 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.617164 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.617185 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.617204 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.719496 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.719556 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.719567 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.719584 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.719596 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.774163 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.774248 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.774260 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.774306 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.774319 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.789736 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.793168 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.793214 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.793226 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.793244 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.793257 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.794326 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3" exitCode=0 Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.794426 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.794453 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"a3bc228fd25f99f1051e24b25de3bd09886a3dabb501f4c7b0ed39f61e1151e4"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.796536 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-lgtw8" event={"ID":"a72940bb-614b-417f-9e8b-bcfddae31f96","Type":"ContainerStarted","Data":"eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.796560 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-lgtw8" event={"ID":"a72940bb-614b-417f-9e8b-bcfddae31f96","Type":"ContainerStarted","Data":"e882931c6ed22be99fb6b15eb55b2ed4be038667f62f478198f18bfbd5089664"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.798400 4690 generic.go:334] "Generic (PLEG): container finished" podID="606df6ba-3dfe-48de-8890-9a5a0c030d23" containerID="7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33" exitCode=0 Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.798434 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" event={"ID":"606df6ba-3dfe-48de-8890-9a5a0c030d23","Type":"ContainerDied","Data":"7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.802109 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" 
event={"ID":"372b2734-220f-4e91-98c1-dbb9d1042273","Type":"ContainerStarted","Data":"f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.802170 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" event={"ID":"372b2734-220f-4e91-98c1-dbb9d1042273","Type":"ContainerStarted","Data":"89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.802186 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" event={"ID":"372b2734-220f-4e91-98c1-dbb9d1042273","Type":"ContainerStarted","Data":"481d06ee036770bc292005b0e6939cb46eb4de4747f870819ee189c32e9d8185"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.804642 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.804679 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.804700 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"57ca654872c0f75a8f6f9f98553221eb00f34f4ead4f70b8705fcda8c3bfb202"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.808344 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.810687 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.810932 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.812580 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgtf4" event={"ID":"d83a0d76-2d76-4202-a2f1-42b9ccb66802","Type":"ContainerStarted","Data":"574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.812774 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgtf4" event={"ID":"d83a0d76-2d76-4202-a2f1-42b9ccb66802","Type":"ContainerStarted","Data":"28f6701610e57df04b888ab587c876e5bb677d4642ee07aad87476c646921734"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.814237 4690 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-hq77p" event={"ID":"fba1e50b-81f4-438f-b056-3f8cbee7fad1","Type":"ContainerStarted","Data":"33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1"} Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.815056 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.820248 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.820304 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.820319 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.820341 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.820357 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.828310 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.838323 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.841947 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.841971 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.841981 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.841994 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.842004 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.851978 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.853812 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"cru
n\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.858335 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.858369 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.858377 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.858390 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.858399 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.865956 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.872273 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: E0320 13:24:04.872438 4690 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.874724 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.874756 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.874771 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.874785 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.874796 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.878817 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.890290 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.903207 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.912903 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.927339 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.941389 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.959529 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.978033 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.978086 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.978097 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.978114 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.978123 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:04Z","lastTransitionTime":"2026-03-20T13:24:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.978216 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d7
5da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:04 crc kubenswrapper[4690]: I0320 13:24:04.991275 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.002675 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.019622 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"
/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.023605 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.023714 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.023768 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:24:07.02374776 +0000 UTC m=+93.313347703 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.023810 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.023866 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.023911 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.023933 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.023967 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.023984 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.024036 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.024045 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:07.024025357 +0000 UTC m=+93.313625360 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.023987 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.024069 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:07.024059878 +0000 UTC m=+93.313659911 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.024096 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:07.024086519 +0000 UTC m=+93.313686562 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.024124 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.024134 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.024144 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.024165 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:07.02415932 +0000 UTC m=+93.313759263 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.039903 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.055017 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\
\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.069684 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.079975 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.080020 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.080032 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.080048 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.080060 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.083109 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.096128 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.114390 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.125031 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.125236 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.125329 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs podName:d459decc-f715-4636-bc35-963ae8133ec7 nodeName:}" failed. 
No retries permitted until 2026-03-20 13:24:07.125306317 +0000 UTC m=+93.414906260 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs") pod "network-metrics-daemon-rpcmp" (UID: "d459decc-f715-4636-bc35-963ae8133ec7") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.131838 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.144969 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.162585 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.178342 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.185416 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.185476 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.185488 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.185507 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.185519 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.199577 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d7
5da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.212639 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"19
2.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.226017 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.237590 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.287913 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.287952 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.287963 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.287978 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.287988 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.390754 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.390790 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.390799 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.390813 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.390825 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.413574 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.413682 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.413711 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.414177 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.414207 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:05 crc kubenswrapper[4690]: E0320 13:24:05.414047 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.492785 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.492846 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.492884 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.492909 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.492922 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.595629 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.595690 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.595710 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.595736 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.595755 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.699213 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.699265 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.699280 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.699300 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.699329 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.801102 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.801144 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.801154 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.801172 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.801182 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.820221 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.820272 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.820289 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.820317 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.821838 4690 generic.go:334] "Generic (PLEG): container finished" podID="606df6ba-3dfe-48de-8890-9a5a0c030d23" containerID="e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338" exitCode=0 Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.822017 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" event={"ID":"606df6ba-3dfe-48de-8890-9a5a0c030d23","Type":"ContainerDied","Data":"e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.842572 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.858975 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":
0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.889888 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.904691 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.904739 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.904749 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.904765 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.904776 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:05Z","lastTransitionTime":"2026-03-20T13:24:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.905036 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.924227 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.938319 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.953751 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.974297 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:05 crc kubenswrapper[4690]: I0320 13:24:05.984901 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:05Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.002422 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.011057 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.011093 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.011104 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.011122 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.011136 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.016007 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.050199 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.085895 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.114298 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.114342 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.114356 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.114374 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.114388 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.123627 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.216417 4690 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.216461 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.216472 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.216489 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.216499 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.319227 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.319260 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.319268 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.319281 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.319291 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.414390 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:06 crc kubenswrapper[4690]: E0320 13:24:06.414542 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.421783 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.421840 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.421888 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.421901 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.421911 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.523658 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.523702 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.523710 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.523726 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.523735 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.627141 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.627188 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.627202 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.627229 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.627241 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.729939 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.729988 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.730012 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.730030 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.730044 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.829998 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.830062 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.832378 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.832442 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.832478 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.832507 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.832530 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.833432 4690 generic.go:334] "Generic (PLEG): container finished" podID="606df6ba-3dfe-48de-8890-9a5a0c030d23" containerID="9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c" exitCode=0 Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.833475 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" event={"ID":"606df6ba-3dfe-48de-8890-9a5a0c030d23","Type":"ContainerDied","Data":"9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.835025 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.854561 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.877666 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.894001 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.921725 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z 
is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.935039 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.935528 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.935562 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.935571 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.935586 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.935599 4690 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:06Z","lastTransitionTime":"2026-03-20T13:24:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.950969 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.964898 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.978006 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:06 crc kubenswrapper[4690]: I0320 13:24:06.989261 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.006951 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.019607 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.028477 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.040929 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.040985 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.040999 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.041018 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.041035 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.041938 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir
\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.049481 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.049672 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:24:11.049641101 +0000 UTC m=+97.339241044 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.049952 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.050217 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.050460 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.050674 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.050094 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.050971 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:11.050961613 +0000 UTC m=+97.340561556 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.050421 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.051111 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.050620 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.051212 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.051303 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:11.051273431 +0000 UTC m=+97.340873434 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.051429 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:11.051420224 +0000 UTC m=+97.341020157 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.050798 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.051590 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.051647 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.051783 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:11.051775493 +0000 UTC m=+97.341375436 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.055056 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.069112 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.082533 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"
/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.096250 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.109633 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.121178 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.131243 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.140957 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.144500 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.144670 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.144769 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.144877 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.145015 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.152103 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.152267 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.152385 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs podName:d459decc-f715-4636-bc35-963ae8133ec7 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:11.152359426 +0000 UTC m=+97.441959419 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs") pod "network-metrics-daemon-rpcmp" (UID: "d459decc-f715-4636-bc35-963ae8133ec7") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.161245 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z 
is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.171500 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.182296 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.193793 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.202436 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.213523 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.225527 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.247615 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.247677 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.247699 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.247731 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.247752 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.350224 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.350274 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.350285 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.350302 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.350314 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.413827 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.413908 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.414316 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.413925 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.414348 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:07 crc kubenswrapper[4690]: E0320 13:24:07.414563 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.454050 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.454082 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.454096 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.454115 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.454128 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.557153 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.557234 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.557247 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.557262 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.557275 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.659875 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.659916 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.659925 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.659958 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.659966 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.762406 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.762453 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.762465 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.762485 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.762502 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.842831 4690 generic.go:334] "Generic (PLEG): container finished" podID="606df6ba-3dfe-48de-8890-9a5a0c030d23" containerID="4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3" exitCode=0 Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.842931 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" event={"ID":"606df6ba-3dfe-48de-8890-9a5a0c030d23","Type":"ContainerDied","Data":"4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.864735 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.864774 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.864793 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.864815 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.864833 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.875261 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.900122 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.929404 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z 
is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.944050 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.961112 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.969671 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.969745 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.969766 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.969792 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.969813 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:07Z","lastTransitionTime":"2026-03-20T13:24:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.974696 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:07 crc kubenswrapper[4690]: I0320 13:24:07.986496 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:
03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:07Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.004085 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.016202 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.025737 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.038489 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.052703 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.064923 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.071648 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.071686 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.071701 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.071721 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.071744 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.077615 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.174350 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.174405 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.174416 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.174439 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.174453 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.277261 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.277542 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.277641 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.277734 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.277794 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.380528 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.380586 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.380598 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.380616 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.380629 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.413462 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:08 crc kubenswrapper[4690]: E0320 13:24:08.413893 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.482824 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.482892 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.482906 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.482924 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.482934 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.586203 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.586246 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.586258 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.586272 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.586284 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.689516 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.689913 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.689979 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.690087 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.690158 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.793299 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.793344 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.793357 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.793375 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.793389 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.855960 4690 generic.go:334] "Generic (PLEG): container finished" podID="606df6ba-3dfe-48de-8890-9a5a0c030d23" containerID="26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b" exitCode=0 Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.856205 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" event={"ID":"606df6ba-3dfe-48de-8890-9a5a0c030d23","Type":"ContainerDied","Data":"26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.866394 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.872288 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.885193 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.896321 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.896354 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.896367 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.896384 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.896395 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.897665 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.908722 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.920106 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.931566 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.941613 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.954095 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.965330 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:08 crc kubenswrapper[4690]: I0320 13:24:08.975374 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:08Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:08.998484 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:08.998531 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:08.998543 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:08.998559 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:08.998569 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:08Z","lastTransitionTime":"2026-03-20T13:24:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.002984 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d7
5da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.014403 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.025718 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.045150 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveRe
adOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"20
26-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.101205 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.101233 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.101242 4690 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.101257 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.101267 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.204191 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.204244 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.204261 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.204279 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.204290 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.308167 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.308210 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.308222 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.308239 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.308251 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.411798 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.411830 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.411840 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.411878 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.411891 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.414241 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:09 crc kubenswrapper[4690]: E0320 13:24:09.414329 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.414356 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.414414 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:09 crc kubenswrapper[4690]: E0320 13:24:09.414446 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:09 crc kubenswrapper[4690]: E0320 13:24:09.414530 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.514823 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.514894 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.514908 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.514928 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.514943 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.617429 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.617485 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.617503 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.617526 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.617541 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.719107 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.719138 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.719147 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.719161 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.719169 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.821180 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.821207 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.821215 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.821227 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.821235 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.875402 4690 generic.go:334] "Generic (PLEG): container finished" podID="606df6ba-3dfe-48de-8890-9a5a0c030d23" containerID="7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004" exitCode=0 Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.875482 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" event={"ID":"606df6ba-3dfe-48de-8890-9a5a0c030d23","Type":"ContainerDied","Data":"7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.891432 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.906837 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.920526 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.923882 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.924271 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.924285 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.924299 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.924319 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:09Z","lastTransitionTime":"2026-03-20T13:24:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.934378 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.948711 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.959513 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.971969 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.982656 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:09 crc kubenswrapper[4690]: I0320 13:24:09.995110 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:09Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.008599 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.019667 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.026177 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.026215 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.026226 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.026241 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.026251 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.039678 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d7
5da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.051523 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\
\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.064257 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.128291 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.128321 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.128329 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.128340 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.128351 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.230358 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.230394 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.230401 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.230415 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.230424 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.333569 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.333623 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.333638 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.333661 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.333676 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.413754 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:10 crc kubenswrapper[4690]: E0320 13:24:10.413896 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.436341 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.436382 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.436391 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.436407 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.436417 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.539064 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.539102 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.539114 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.539130 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.539142 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.642923 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.642993 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.643007 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.643028 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.643041 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.748135 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.748179 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.748188 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.748203 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.748212 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.854837 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.854929 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.854942 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.854961 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.854978 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.885509 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.885980 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.886017 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.892115 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" event={"ID":"606df6ba-3dfe-48de-8890-9a5a0c030d23","Type":"ContainerStarted","Data":"4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.902730 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.916126 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.935372 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.948056 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.955392 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.956755 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.956788 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.956798 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.956810 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.956819 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:10Z","lastTransitionTime":"2026-03-20T13:24:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.977647 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"i
mageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":
\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:10 crc kubenswrapper[4690]: I0320 13:24:10.993840 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:10Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.003071 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.027330 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-
03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: 
I0320 13:24:11.041399 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.054622 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.058710 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.058787 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.058803 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.058838 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.058882 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.072316 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.083228 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.092850 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.094649 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.094761 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.094798 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.094820 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.094856 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:24:19.094825772 +0000 UTC m=+105.384425715 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.094894 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.094932 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.094946 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.094957 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.094974 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.095001 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:19.094990476 +0000 UTC m=+105.384590419 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.094983 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.094991 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.095108 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.095122 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.095064 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:19.095039827 +0000 UTC m=+105.384639830 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.095185 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:19.09516832 +0000 UTC m=+105.384768313 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.095231 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:19.095220792 +0000 UTC m=+105.384820805 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.105025 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"na
me\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.115106 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.123890 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.138034 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.149934 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.161828 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.161916 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.161927 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.161971 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.161984 4690 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.162590 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.173424 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.183359 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.192408 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.195994 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.196127 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.196173 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs podName:d459decc-f715-4636-bc35-963ae8133ec7 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:19.196161333 +0000 UTC m=+105.485761276 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs") pod "network-metrics-daemon-rpcmp" (UID: "d459decc-f715-4636-bc35-963ae8133ec7") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.216454 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\
\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\
"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.226060 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.236516 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.245465 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.257168 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kub
ernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.264013 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.264041 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.264050 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.264064 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.264073 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.270589 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.366081 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.366110 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.366118 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.366133 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.366146 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.414220 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.414360 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.414377 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.414516 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.414673 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:11 crc kubenswrapper[4690]: E0320 13:24:11.414901 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.470472 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.470558 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.470576 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.470633 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.470650 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.573475 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.573539 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.573563 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.573587 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.573606 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.677192 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.677244 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.677255 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.677276 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.677290 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.779963 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.780008 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.780026 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.780049 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.780066 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.883082 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.883118 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.883130 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.883147 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.883158 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.896215 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.917366 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.935778 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.954053 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\
\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256
:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.
11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.969980 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.981787 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.986040 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.986229 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.986336 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.986440 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.986538 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:11Z","lastTransitionTime":"2026-03-20T13:24:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:11 crc kubenswrapper[4690]: I0320 13:24:11.993879 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:11Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.004431 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.014062 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.024806 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.039756 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.057692 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.079742 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.089689 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.089980 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.090076 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.090178 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.090266 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.114770 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.132215 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.169825 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:12Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.193472 4690 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.193522 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.193534 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.193551 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.193563 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.295557 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.295589 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.295598 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.295611 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.295620 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.398089 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.398137 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.398144 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.398158 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.398167 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.415123 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:12 crc kubenswrapper[4690]: E0320 13:24:12.415207 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.501091 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.501155 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.501167 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.501209 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.501223 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.603883 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.603949 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.603959 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.603978 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.603989 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.706581 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.706694 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.706712 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.706743 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.706760 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.809665 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.809709 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.809718 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.809730 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.809739 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.912247 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.912277 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.912285 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.912297 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:12 crc kubenswrapper[4690]: I0320 13:24:12.912307 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:12Z","lastTransitionTime":"2026-03-20T13:24:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.015804 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.015846 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.015876 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.015893 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.015904 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.117731 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.117774 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.117786 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.117806 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.117820 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.221198 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.221240 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.221251 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.221268 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.221281 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.326582 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.326668 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.326689 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.326724 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.326750 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.414130 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:13 crc kubenswrapper[4690]: E0320 13:24:13.414315 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.414385 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.414431 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:13 crc kubenswrapper[4690]: E0320 13:24:13.414597 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:13 crc kubenswrapper[4690]: E0320 13:24:13.414641 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.428528 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.428583 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.428599 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.428617 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.428628 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.531754 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.531827 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.531931 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.531967 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.531989 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.635137 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.635199 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.635221 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.635249 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.635270 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.737880 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.737949 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.737966 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.737990 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.738005 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.840446 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.840477 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.840485 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.840499 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.840507 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.904326 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/0.log" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.909039 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c" exitCode=1 Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.909160 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.910360 4690 scope.go:117] "RemoveContainer" containerID="1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.925515 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41a
c2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:13Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.942552 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.942582 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.942590 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.942628 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.942637 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:13Z","lastTransitionTime":"2026-03-20T13:24:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.945001 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:13Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.959188 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:13Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.973957 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:13Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.986925 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:13Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:13 crc kubenswrapper[4690]: I0320 13:24:13.998717 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:13Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.010783 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.024492 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.035227 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.044935 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.044974 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.044982 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.044997 4690 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.045006 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.048403 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.061533 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.072227 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.095700 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e8648935
3850dc18a02d5420bbb2bd6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:13Z\\\",\\\"message\\\":\\\"topping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0320 13:24:13.088160 6506 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 13:24:13.088236 6506 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0320 13:24:13.088529 6506 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:13.088542 6506 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:13.088567 6506 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 13:24:13.088585 6506 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 13:24:13.088591 6506 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 13:24:13.088610 6506 factory.go:656] Stopping watch factory\\\\nI0320 13:24:13.088624 6506 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:13.088648 6506 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:13.088657 6506 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 13:24:13.088662 6506 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 13:24:13.088667 6506 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 
13\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.107536 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.147696 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.147729 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.147739 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.147753 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.147761 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.274730 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.274772 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.274783 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.274799 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.274812 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.377580 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.377641 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.377666 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.377701 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.377724 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.414341 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:14 crc kubenswrapper[4690]: E0320 13:24:14.414602 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.432756 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.451069 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.465754 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.479971 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.480006 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.480016 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.480031 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.480042 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.482043 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"host
IP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.501162 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.518455 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.547141 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.573469 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.581388 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.581426 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.581438 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.581452 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.581463 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.591812 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.609443 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:13Z\\\",\\\"message\\\":\\\"topping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0320 13:24:13.088160 6506 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 13:24:13.088236 6506 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0320 13:24:13.088529 6506 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:13.088542 6506 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:13.088567 6506 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 13:24:13.088585 6506 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 13:24:13.088591 6506 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 13:24:13.088610 6506 factory.go:656] Stopping watch factory\\\\nI0320 13:24:13.088624 6506 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:13.088648 6506 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:13.088657 6506 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 13:24:13.088662 6506 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 13:24:13.088667 6506 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 
13\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.620133 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.634350 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\
":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.645934 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-o
verrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.663057 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o
://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f
3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.683800 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.683828 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.683837 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.683875 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.683889 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.786361 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.786398 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.786409 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.786422 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.786431 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.888803 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.888860 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.888869 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.888883 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.888892 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.894647 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.894679 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.894691 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.894707 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.894717 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: E0320 13:24:14.906057 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.909000 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.909019 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.909026 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.909038 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.909049 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.914010 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/0.log" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.917671 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c"} Mar 20 13:24:14 crc kubenswrapper[4690]: E0320 13:24:14.929979 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.933777 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.933823 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.933833 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.933889 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.933903 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.935666 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":
\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3
f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernet
es.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: E0320 13:24:14.948014 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.949092 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.952253 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.952285 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.952292 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.952307 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.952316 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.964599 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: E0320 13:24:14.964943 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.968332 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.968365 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.968374 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.968387 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.968397 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.978745 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"o
vnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: E0320 13:24:14.980313 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:14Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: E0320 13:24:14.980588 4690 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.989614 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.994928 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.994975 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.994988 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.995005 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:14 crc kubenswrapper[4690]: I0320 13:24:14.995017 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:14Z","lastTransitionTime":"2026-03-20T13:24:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.005633 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.016098 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.038011 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:13Z\\\",\\\"message\\\":\\\"topping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0320 13:24:13.088160 6506 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 13:24:13.088236 6506 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0320 13:24:13.088529 6506 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:13.088542 6506 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:13.088567 6506 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 13:24:13.088585 6506 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 13:24:13.088591 6506 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 13:24:13.088610 6506 factory.go:656] Stopping watch factory\\\\nI0320 13:24:13.088624 6506 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:13.088648 6506 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:13.088657 6506 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 13:24:13.088662 6506 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 13:24:13.088667 6506 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 
13\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.049827 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.061424 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.074264 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.084074 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.097468 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.097521 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.097537 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.097554 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.097569 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.098516 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.110220 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:
03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.199640 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.199696 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.199709 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.199730 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.199745 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.302428 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.302916 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.302935 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.302965 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.302984 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.405585 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.405633 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.405644 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.405663 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.405675 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.414122 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.414220 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:15 crc kubenswrapper[4690]: E0320 13:24:15.414240 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.414295 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:15 crc kubenswrapper[4690]: E0320 13:24:15.414444 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:15 crc kubenswrapper[4690]: E0320 13:24:15.414560 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.508299 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.508327 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.508338 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.508351 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.508360 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.611600 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.611680 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.611698 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.611726 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.611743 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.714468 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.714526 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.714543 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.714566 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.714584 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.817983 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.818044 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.818063 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.818088 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.818105 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.920302 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.920428 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.920446 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.920470 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.920489 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:15Z","lastTransitionTime":"2026-03-20T13:24:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.924636 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/1.log" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.925199 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/0.log" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.928091 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c" exitCode=1 Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.928138 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c"} Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.928175 4690 scope.go:117] "RemoveContainer" containerID="1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.928947 4690 scope.go:117] "RemoveContainer" containerID="765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c" Mar 20 13:24:15 crc kubenswrapper[4690]: E0320 13:24:15.929126 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.947049 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.963691 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.977203 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:15 crc kubenswrapper[4690]: I0320 13:24:15.989275 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:15Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.003837 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.016780 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.026877 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.037683 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.047371 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.051043 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.051094 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.051108 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.051132 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.051148 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.059178 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.084030 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1084aa472cc610a895143638fb5023d7e86489353850dc18a02d5420bbb2bd6c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:13Z\\\",\\\"message\\\":\\\"topping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0320 13:24:13.088160 6506 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI0320 13:24:13.088236 6506 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0320 13:24:13.088529 6506 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:13.088542 6506 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:13.088567 6506 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0320 13:24:13.088585 6506 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0320 13:24:13.088591 6506 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0320 13:24:13.088610 6506 factory.go:656] Stopping watch factory\\\\nI0320 13:24:13.088624 6506 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:13.088648 6506 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:13.088657 6506 handler.go:208] Removed *v1.Node event handler 7\\\\nI0320 13:24:13.088662 6506 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0320 13:24:13.088667 6506 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0320 13\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:15Z\\\",\\\"message\\\":\\\"roller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to 
start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z]\\\\nI0320 13:24:14.918128 6641 services_controller.go:451] Built service openshift-machine-config-operator/machine-config-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Router\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:14Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"qua
y.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.095333 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.111637 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.122839 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.154429 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.154501 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.154524 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.154552 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.154573 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.257653 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.257703 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.257717 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.257736 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.257752 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.362120 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.362174 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.362188 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.362210 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.362250 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.414348 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:16 crc kubenswrapper[4690]: E0320 13:24:16.414564 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.437592 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.465934 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.466004 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.466029 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.466063 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.466088 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.570775 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.570836 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.570873 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.570898 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.570914 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.674537 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.674611 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.674644 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.674672 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.674694 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.777879 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.777943 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.777961 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.777986 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.778006 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.881539 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.881586 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.881597 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.881614 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.881625 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.933349 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/1.log" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.938545 4690 scope.go:117] "RemoveContainer" containerID="765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c" Mar 20 13:24:16 crc kubenswrapper[4690]: E0320 13:24:16.938707 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.955487 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is 
not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.973253 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\
\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.984459 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.984844 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.984967 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.985076 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.985221 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:16Z","lastTransitionTime":"2026-03-20T13:24:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:16 crc kubenswrapper[4690]: I0320 13:24:16.991814 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:16Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.006493 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.021090 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.037225 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.051364 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.072066 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3
da2ce25c904e5a7d8d57290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:15Z\\\",\\\"message\\\":\\\"roller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z]\\\\nI0320 13:24:14.918128 6641 services_controller.go:451] Built service openshift-machine-config-operator/machine-config-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Router\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.083549 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.087350 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.087375 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.087386 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.087401 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.087413 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.099028 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.115396 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.129095 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.149716 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be
8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"n
ame\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.164781 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.182264 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.189756 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 
13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.189797 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.189809 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.189826 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.189837 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.292222 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.292254 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.292262 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.292276 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.292284 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.395421 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.395460 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.395471 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.395485 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.395496 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.413470 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.413506 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.413542 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:17 crc kubenswrapper[4690]: E0320 13:24:17.413602 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:17 crc kubenswrapper[4690]: E0320 13:24:17.413780 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:17 crc kubenswrapper[4690]: E0320 13:24:17.414577 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.431781 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.433827 4690 scope.go:117] "RemoveContainer" containerID="b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.498005 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.498065 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.498082 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.498106 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.498123 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.601223 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.601261 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.601272 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.601290 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.601303 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.703369 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.703430 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.703449 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.703473 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.703491 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.805503 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.805537 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.805545 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.805557 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.805567 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.908287 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.908334 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.908343 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.908358 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.908368 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:17Z","lastTransitionTime":"2026-03-20T13:24:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.944328 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.946936 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710"} Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.947211 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.965474 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.982641 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:17 crc kubenswrapper[4690]: I0320 13:24:17.998739 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:17Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.010527 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.010567 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.010585 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.010599 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.010611 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.012654 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.024435 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.034661 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.050174 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.064540 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.077944 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.088779 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.097438 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.113483 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.113533 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.113542 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.113557 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.113568 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.118335 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:15Z\\\",\\\"message\\\":\\\"roller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z]\\\\nI0320 13:24:14.918128 6641 services_controller.go:451] Built service openshift-machine-config-operator/machine-config-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, 
Router\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.129414 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.143359 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.155161 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.177337 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f3
22cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:18Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.216259 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.216423 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.216434 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.216448 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.216457 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.318761 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.318816 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.318832 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.318884 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.318930 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.413990 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:18 crc kubenswrapper[4690]: E0320 13:24:18.414187 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.420776 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.420821 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.420832 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.420868 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.420882 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.523825 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.523895 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.523911 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.523932 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.523949 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.626311 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.626348 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.626357 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.626371 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.626380 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.728379 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.728426 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.728438 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.728455 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.728470 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.831781 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.831834 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.831881 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.831904 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.831921 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.935272 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.935335 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.935353 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.935376 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:18 crc kubenswrapper[4690]: I0320 13:24:18.935394 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:18Z","lastTransitionTime":"2026-03-20T13:24:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.037722 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.037796 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.037819 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.037894 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.037920 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.140723 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.140773 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.140810 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.140828 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.140840 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.192071 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.192223 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.192301 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192347 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:24:35.192309579 +0000 UTC m=+121.481909562 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.192410 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192440 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192520 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:35.192498243 +0000 UTC m=+121.482098216 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.192550 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192625 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192631 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192659 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192679 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192664 4690 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:35.192652037 +0000 UTC m=+121.482252020 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192743 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:35.192728409 +0000 UTC m=+121.482328382 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192755 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192784 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192806 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.192912 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:35.192887153 +0000 UTC m=+121.482487126 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.244492 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.244593 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.244610 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.244633 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.244650 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.293055 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.293228 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.293318 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs podName:d459decc-f715-4636-bc35-963ae8133ec7 nodeName:}" failed. No retries permitted until 2026-03-20 13:24:35.293297441 +0000 UTC m=+121.582897384 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs") pod "network-metrics-daemon-rpcmp" (UID: "d459decc-f715-4636-bc35-963ae8133ec7") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.347174 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.347230 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.347245 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.347267 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.347282 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.413626 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.413756 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.413803 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.413819 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.413859 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:19 crc kubenswrapper[4690]: E0320 13:24:19.414024 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.449987 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.450031 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.450043 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.450058 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.450071 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.552791 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.552893 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.552908 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.552926 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.552942 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.655279 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.655369 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.655395 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.655428 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.655452 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.758558 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.758631 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.758647 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.758671 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.758688 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.861440 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.861494 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.861510 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.861535 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.861552 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.964758 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.964899 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.964926 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.964952 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:19 crc kubenswrapper[4690]: I0320 13:24:19.964969 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:19Z","lastTransitionTime":"2026-03-20T13:24:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.068349 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.068409 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.068424 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.068446 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.068461 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.172589 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.172658 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.172673 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.172696 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.172712 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.276316 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.276382 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.276400 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.276425 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.276443 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.379800 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.379879 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.379905 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.379928 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.379941 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.414512 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:20 crc kubenswrapper[4690]: E0320 13:24:20.414773 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.482954 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.482991 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.483001 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.483016 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.483026 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.586089 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.586172 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.586195 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.586225 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.586249 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.690222 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.690321 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.690348 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.690381 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.690406 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.793895 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.793957 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.793975 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.793999 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.794017 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.896425 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.896484 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.896497 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.896511 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.896522 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.998715 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.998764 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.998782 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.998805 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:20 crc kubenswrapper[4690]: I0320 13:24:20.998822 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:20Z","lastTransitionTime":"2026-03-20T13:24:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.102070 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.102148 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.102178 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.102206 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.102249 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.204896 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.204935 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.204945 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.204961 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.204972 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.308340 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.308390 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.308406 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.308430 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.308447 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.418349 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.418401 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.418431 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.418393 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.418460 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.418463 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.418538 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: E0320 13:24:21.418585 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.418612 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: E0320 13:24:21.418774 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:21 crc kubenswrapper[4690]: E0320 13:24:21.418893 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.522475 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.522542 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.522559 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.522584 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.522601 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.625668 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.626083 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.626250 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.626385 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.626524 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.730441 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.730486 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.730498 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.730516 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.730529 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.833646 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.833724 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.833748 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.833781 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.833806 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.937151 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.937194 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.937205 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.937243 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:21 crc kubenswrapper[4690]: I0320 13:24:21.937257 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:21Z","lastTransitionTime":"2026-03-20T13:24:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.039993 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.040038 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.040049 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.040066 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.040078 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.142681 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.142744 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.142758 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.142780 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.142795 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.245490 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.245536 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.245549 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.245568 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.245580 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.348975 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.349071 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.349106 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.349140 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.349168 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.414617 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:22 crc kubenswrapper[4690]: E0320 13:24:22.414876 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.452077 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.452191 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.452215 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.452250 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.452275 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.555146 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.555206 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.555223 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.555242 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.555255 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.658631 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.658674 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.658687 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.658705 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.658717 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.760802 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.760928 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.760961 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.760992 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.761018 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.863714 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.863768 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.863785 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.863806 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.863819 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.966793 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.966839 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.966921 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.966938 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:22 crc kubenswrapper[4690]: I0320 13:24:22.966950 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:22Z","lastTransitionTime":"2026-03-20T13:24:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.070630 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.070704 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.070723 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.070748 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.070765 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.176941 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.177001 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.177021 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.177047 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.177066 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.280348 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.280388 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.280399 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.280415 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.280427 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.383917 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.384004 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.384038 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.384068 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.384090 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.413703 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.413737 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.413705 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:23 crc kubenswrapper[4690]: E0320 13:24:23.413837 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:23 crc kubenswrapper[4690]: E0320 13:24:23.413917 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:23 crc kubenswrapper[4690]: E0320 13:24:23.413970 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.487099 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.487140 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.487156 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.487179 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.487195 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.589826 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.589915 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.589932 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.589951 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.589967 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.692533 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.692608 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.692630 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.692658 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.692678 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.795909 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.795977 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.795998 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.796028 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.796051 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.907477 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.908173 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.908237 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.908261 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:23 crc kubenswrapper[4690]: I0320 13:24:23.908276 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:23Z","lastTransitionTime":"2026-03-20T13:24:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.010627 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.010674 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.010685 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.010699 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.010708 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.114332 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.114375 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.114391 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.114409 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.114421 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.218259 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.218326 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.218343 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.218367 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.218386 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.321569 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.321634 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.321646 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.321665 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.321678 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.414139 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:24 crc kubenswrapper[4690]: E0320 13:24:24.414367 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.423449 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.423492 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.423503 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.423519 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.423531 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.437683 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.460741 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc
1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.477521 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.495623 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.513815 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.526961 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.527039 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.527054 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.527072 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.527083 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.530277 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.549347 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.567013 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.586092 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.604466 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.621163 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.630259 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.630308 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.630319 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.630341 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.630355 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.637261 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.652533 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.678914 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3
da2ce25c904e5a7d8d57290c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:15Z\\\",\\\"message\\\":\\\"roller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z]\\\\nI0320 13:24:14.918128 6641 services_controller.go:451] Built service openshift-machine-config-operator/machine-config-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Router\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.703123 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.719905 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:24Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.733202 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.733246 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.733286 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.733306 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.733319 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.836180 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.836408 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.836489 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.836571 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.836681 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.940308 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.940983 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.941142 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.941390 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:24 crc kubenswrapper[4690]: I0320 13:24:24.941521 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:24Z","lastTransitionTime":"2026-03-20T13:24:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.014983 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.015019 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.015029 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.015042 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.015050 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.031009 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:25Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.036590 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.036644 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.036660 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.036681 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.036695 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.051871 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:25Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.055895 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.055924 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.055933 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.055976 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.055986 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.073063 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:25Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.077266 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.077312 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.077322 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.077335 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.077378 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.095186 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:25Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.098920 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.099044 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.099111 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.099191 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.099259 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.112033 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:25Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:25Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.112543 4690 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.115335 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.115408 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.115427 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.115453 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.115470 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.218463 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.218508 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.218519 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.218533 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.218541 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.322428 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.322492 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.322510 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.322541 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.322561 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.414417 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.414462 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.414565 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.414719 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.415051 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:25 crc kubenswrapper[4690]: E0320 13:24:25.415258 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.425029 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.425094 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.425121 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.425150 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.425172 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.528365 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.528430 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.528453 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.528481 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.528502 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.631627 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.631690 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.631705 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.631727 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.631743 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.734361 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.734400 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.734410 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.734426 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.734436 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.837033 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.837072 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.837084 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.837101 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.837112 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.939712 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.939750 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.939759 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.939773 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:25 crc kubenswrapper[4690]: I0320 13:24:25.939783 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:25Z","lastTransitionTime":"2026-03-20T13:24:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.041880 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.041914 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.041924 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.041939 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.041949 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.144961 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.145016 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.145029 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.145054 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.145069 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.248789 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.248903 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.248925 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.248955 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.248974 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.352381 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.352429 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.352444 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.352468 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.352488 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.414895 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:26 crc kubenswrapper[4690]: E0320 13:24:26.415091 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.455908 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.455980 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.456003 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.456034 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.456055 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.558589 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.558640 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.558651 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.558669 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.558680 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.662199 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.662248 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.662259 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.662276 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.662286 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.765110 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.765193 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.765203 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.765223 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.765236 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.868720 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.868773 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.868791 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.868815 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.868831 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.971214 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.971249 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.971260 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.971275 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:26 crc kubenswrapper[4690]: I0320 13:24:26.971285 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:26Z","lastTransitionTime":"2026-03-20T13:24:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.074257 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.074291 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.074300 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.074315 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.074325 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.177750 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.177814 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.177825 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.177861 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.177874 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.280539 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.280569 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.280579 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.280594 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.280605 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.383591 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.383633 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.383642 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.383659 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.383668 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.414285 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.414293 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.414462 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:27 crc kubenswrapper[4690]: E0320 13:24:27.414631 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:27 crc kubenswrapper[4690]: E0320 13:24:27.414759 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:27 crc kubenswrapper[4690]: E0320 13:24:27.414833 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.485774 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.485837 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.485924 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.485946 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.485963 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.588509 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.588575 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.588588 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.588606 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.588619 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.691733 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.692290 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.692445 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.692650 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.692826 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.795792 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.796148 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.796213 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.796274 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.796349 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.898698 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.898762 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.898776 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.898805 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:27 crc kubenswrapper[4690]: I0320 13:24:27.898822 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:27Z","lastTransitionTime":"2026-03-20T13:24:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.002073 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.002125 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.002142 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.002167 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.002186 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.105374 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.105446 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.105464 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.105489 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.105508 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.208460 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.208541 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.208563 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.208597 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.208619 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.313044 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.313371 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.313519 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.313711 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.313979 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.413964 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:28 crc kubenswrapper[4690]: E0320 13:24:28.414231 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.416708 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.416751 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.416771 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.416793 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.416808 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.521253 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.521314 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.521332 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.521356 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.521375 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.624648 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.624746 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.624778 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.624808 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.624830 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.727394 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.727434 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.727446 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.727461 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.727471 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.831038 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.831127 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.831144 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.831175 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.831194 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.935040 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.935116 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.935151 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.935188 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:28 crc kubenswrapper[4690]: I0320 13:24:28.935217 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:28Z","lastTransitionTime":"2026-03-20T13:24:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.039245 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.039727 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.039987 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.040142 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.040296 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.143552 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.143620 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.143641 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.143670 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.143692 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.246500 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.246547 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.246558 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.246571 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.246583 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.350418 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.350486 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.350502 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.350527 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.350544 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.414202 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.414258 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:29 crc kubenswrapper[4690]: E0320 13:24:29.414410 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.414432 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:29 crc kubenswrapper[4690]: E0320 13:24:29.414615 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:29 crc kubenswrapper[4690]: E0320 13:24:29.415286 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.415966 4690 scope.go:117] "RemoveContainer" containerID="765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.453611 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.453878 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.453966 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.454109 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.454192 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.557285 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.557324 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.557333 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.557348 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.557359 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.661661 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.661719 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.661733 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.661757 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.661773 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.765268 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.765303 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.765319 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.765336 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.765346 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.867398 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.867675 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.867783 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.867874 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.867967 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.970533 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.970779 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.970890 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.970996 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.971067 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:29Z","lastTransitionTime":"2026-03-20T13:24:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.991411 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/1.log" Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.993962 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061"} Mar 20 13:24:29 crc kubenswrapper[4690]: I0320 13:24:29.994571 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.012511 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.026715 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.038794 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.056616 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2db
aa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:15Z\\\",\\\"message\\\":\\\"roller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z]\\\\nI0320 13:24:14.918128 6641 services_controller.go:451] Built service openshift-machine-config-operator/machine-config-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, 
Router\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.066365 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.073006 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.073043 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.073054 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.073071 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.073083 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.081690 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.093148 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.111632 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.121210 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.138009 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.155454 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.171322 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.174933 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.174965 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.174974 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.174987 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.174997 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.184637 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.195259 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.209307 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.223690 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:30Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.277328 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.277961 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.278028 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.278097 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.278178 4690 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.380582 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.380627 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.380639 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.380657 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.380668 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.414406 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:30 crc kubenswrapper[4690]: E0320 13:24:30.414833 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.483075 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.483397 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.483475 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.483555 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.483620 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.586149 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.586178 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.586186 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.586199 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.586208 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.689770 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.689814 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.689826 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.689843 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.689876 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.793211 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.793277 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.793295 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.793321 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.793338 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.897081 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.897185 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.897213 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.897244 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:30 crc kubenswrapper[4690]: I0320 13:24:30.897267 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:30Z","lastTransitionTime":"2026-03-20T13:24:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.000138 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.000210 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.000232 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.000259 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.000280 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.001913 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/2.log" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.003411 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/1.log" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.010975 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061" exitCode=1 Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.011032 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.011078 4690 scope.go:117] "RemoveContainer" containerID="765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.012343 4690 scope.go:117] "RemoveContainer" containerID="8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061" Mar 20 13:24:31 crc kubenswrapper[4690]: E0320 13:24:31.012749 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.037220 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.060430 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.085225 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.102823 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.102882 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.102890 4690 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.102908 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.102919 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.107394 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.123366 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.136631 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.158006 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://765720c46a39bb3664162da1a244a40990b482d3da2ce25c904e5a7d8d57290c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:15Z\\\",\\\"message\\\":\\\"roller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:14Z is after 2025-08-24T17:21:41Z]\\\\nI0320 13:24:14.918128 6641 services_controller.go:451] Built service openshift-machine-config-operator/machine-config-controller cluster-wide LB for network=default: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-machine-config-operator/machine-config-controller_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-config-operator/machine-config-controller\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.16\\\\\\\", Port:9001, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Router\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:14Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node 
event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\
\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.171002 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.187221 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.199501 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.205521 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.205554 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.205562 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.205575 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.205583 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.211078 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.223232 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.235511 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.253937 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.264233 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.276756 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:31Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.308587 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 
13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.308659 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.308674 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.308726 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.308802 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.412338 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.412392 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.412404 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.412421 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.412433 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.413615 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.413663 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.413908 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:31 crc kubenswrapper[4690]: E0320 13:24:31.414095 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:31 crc kubenswrapper[4690]: E0320 13:24:31.414199 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:31 crc kubenswrapper[4690]: E0320 13:24:31.414314 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.516140 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.516198 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.516257 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.516290 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.516312 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.620022 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.620090 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.620111 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.620139 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.620159 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.723718 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.723805 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.723826 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.723923 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.723957 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.827211 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.827255 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.827273 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.827295 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.827311 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.929519 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.929637 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.929657 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.929692 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:31 crc kubenswrapper[4690]: I0320 13:24:31.929712 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:31Z","lastTransitionTime":"2026-03-20T13:24:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.016305 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/2.log" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.021894 4690 scope.go:117] "RemoveContainer" containerID="8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061" Mar 20 13:24:32 crc kubenswrapper[4690]: E0320 13:24:32.022216 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.033189 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.033231 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.033240 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.033254 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.033265 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.043618 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.064064 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.079288 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.097224 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.113021 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.128966 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.137289 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.137336 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.137347 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.137368 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.137382 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.160636 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/servic
eaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.176497 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.196962 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.214322 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.232013 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.241598 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.241663 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.241685 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.241714 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.241734 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.251791 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.271079 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.291168 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.306423 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.323033 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.344257 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 
13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.344303 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.344317 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.344333 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.344345 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.413887 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:32 crc kubenswrapper[4690]: E0320 13:24:32.414065 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.447514 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.447586 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.447606 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.447633 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.447661 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.550914 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.550966 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.550982 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.551003 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.551019 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.653682 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.653718 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.653727 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.653739 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.653748 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.756337 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.756377 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.756386 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.756403 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.756414 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.859570 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.859698 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.859719 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.859744 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.859766 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.958758 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.961870 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.961912 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.961925 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.961943 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.961955 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:32Z","lastTransitionTime":"2026-03-20T13:24:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.983721 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:32 crc kubenswrapper[4690]: I0320 13:24:32.998954 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:32Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.017616 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.031998 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.056795 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.064700 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.064736 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.064748 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.064763 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.064774 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.073180 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.087263 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 
2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.103079 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.121705 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.137795 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.161819 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2db
aa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.166652 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.166696 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.166708 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.166724 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.166735 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.174197 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.188403 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.206572 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.222185 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.240478 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:33Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.269338 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.269396 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.269413 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.269438 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.269455 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.372805 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.372915 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.372937 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.372965 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.372984 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.414466 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.414612 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:33 crc kubenswrapper[4690]: E0320 13:24:33.414741 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.414606 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:33 crc kubenswrapper[4690]: E0320 13:24:33.414908 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:33 crc kubenswrapper[4690]: E0320 13:24:33.415327 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.476046 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.476357 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.476425 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.476494 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.476566 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.579485 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.579535 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.579548 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.579568 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.579581 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.682499 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.682538 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.682550 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.682565 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.682577 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.785338 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.785385 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.785400 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.785419 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.785430 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.888040 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.888113 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.888123 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.888141 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.888150 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.990943 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.991021 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.991034 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.991049 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:33 crc kubenswrapper[4690]: I0320 13:24:33.991340 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:33Z","lastTransitionTime":"2026-03-20T13:24:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.094083 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.094160 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.094184 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.094213 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.094234 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:34Z","lastTransitionTime":"2026-03-20T13:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.196335 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.196394 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.196413 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.196436 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.196454 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:34Z","lastTransitionTime":"2026-03-20T13:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.301953 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.302043 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.302072 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.302106 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.302144 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:34Z","lastTransitionTime":"2026-03-20T13:24:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:34 crc kubenswrapper[4690]: E0320 13:24:34.403236 4690 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.413442 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:34 crc kubenswrapper[4690]: E0320 13:24:34.413565 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.429062 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.442483 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.469115 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.482893 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.503401 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b
8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.516826 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.531625 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.554481 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.566940 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.581167 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: E0320 13:24:34.583148 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.594416 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.605812 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.622508 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.639374 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.656609 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:34 crc kubenswrapper[4690]: I0320 13:24:34.678054 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:34Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.230183 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.230239 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.230251 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.230270 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.230284 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:35Z","lastTransitionTime":"2026-03-20T13:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.251838 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:35Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.256554 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.256642 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.256674 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.256695 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.256724 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.256787 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.256842 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:25:07.256822179 +0000 UTC m=+153.546422142 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.256983 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257004 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:25:07.256957433 +0000 UTC m=+153.546557426 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257019 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257065 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:25:07.257045585 +0000 UTC m=+153.546645638 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257077 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257131 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257162 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257162 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257209 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257245 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 13:25:07.257219979 +0000 UTC m=+153.546819982 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.257164 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.257296 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.257306 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 13:25:07.257283751 +0000 UTC m=+153.546883774 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.257308 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.257506 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.257560 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:35Z","lastTransitionTime":"2026-03-20T13:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.271684 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:35Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.276601 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.276678 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.276702 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.276732 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.276756 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:35Z","lastTransitionTime":"2026-03-20T13:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.292496 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:35Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.296879 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.296912 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.296923 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.296942 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.296954 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:35Z","lastTransitionTime":"2026-03-20T13:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.312613 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:35Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.317220 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.317281 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.317299 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.317323 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.317341 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:35Z","lastTransitionTime":"2026-03-20T13:24:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.332185 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:35Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.332356 4690 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.357951 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" 
(UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.358156 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.358243 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs podName:d459decc-f715-4636-bc35-963ae8133ec7 nodeName:}" failed. No retries permitted until 2026-03-20 13:25:07.358224972 +0000 UTC m=+153.647824915 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs") pod "network-metrics-daemon-rpcmp" (UID: "d459decc-f715-4636-bc35-963ae8133ec7") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.414331 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.414361 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.414463 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:35 crc kubenswrapper[4690]: I0320 13:24:35.414357 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.414565 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:35 crc kubenswrapper[4690]: E0320 13:24:35.414680 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:36 crc kubenswrapper[4690]: I0320 13:24:36.413958 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:36 crc kubenswrapper[4690]: E0320 13:24:36.414122 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:37 crc kubenswrapper[4690]: I0320 13:24:37.413505 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:37 crc kubenswrapper[4690]: I0320 13:24:37.413597 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:37 crc kubenswrapper[4690]: E0320 13:24:37.413640 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:37 crc kubenswrapper[4690]: I0320 13:24:37.413689 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:37 crc kubenswrapper[4690]: E0320 13:24:37.413830 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:37 crc kubenswrapper[4690]: E0320 13:24:37.413993 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:38 crc kubenswrapper[4690]: I0320 13:24:38.414191 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:38 crc kubenswrapper[4690]: E0320 13:24:38.414385 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:38 crc kubenswrapper[4690]: I0320 13:24:38.427107 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Mar 20 13:24:39 crc kubenswrapper[4690]: I0320 13:24:39.413798 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:39 crc kubenswrapper[4690]: I0320 13:24:39.413834 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:39 crc kubenswrapper[4690]: I0320 13:24:39.413910 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:39 crc kubenswrapper[4690]: E0320 13:24:39.414003 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:39 crc kubenswrapper[4690]: E0320 13:24:39.414258 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:39 crc kubenswrapper[4690]: E0320 13:24:39.414302 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:39 crc kubenswrapper[4690]: E0320 13:24:39.584782 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:24:40 crc kubenswrapper[4690]: I0320 13:24:40.413534 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:40 crc kubenswrapper[4690]: E0320 13:24:40.413747 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:41 crc kubenswrapper[4690]: I0320 13:24:41.413761 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:41 crc kubenswrapper[4690]: I0320 13:24:41.413936 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:41 crc kubenswrapper[4690]: E0320 13:24:41.413990 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:41 crc kubenswrapper[4690]: I0320 13:24:41.413761 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:41 crc kubenswrapper[4690]: E0320 13:24:41.414101 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:41 crc kubenswrapper[4690]: E0320 13:24:41.414351 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:42 crc kubenswrapper[4690]: I0320 13:24:42.413832 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:42 crc kubenswrapper[4690]: E0320 13:24:42.414006 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:43 crc kubenswrapper[4690]: I0320 13:24:43.414410 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:43 crc kubenswrapper[4690]: I0320 13:24:43.414472 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:43 crc kubenswrapper[4690]: I0320 13:24:43.415031 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:43 crc kubenswrapper[4690]: E0320 13:24:43.415161 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:43 crc kubenswrapper[4690]: E0320 13:24:43.415316 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:43 crc kubenswrapper[4690]: E0320 13:24:43.415562 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:43 crc kubenswrapper[4690]: I0320 13:24:43.415644 4690 scope.go:117] "RemoveContainer" containerID="8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061" Mar 20 13:24:43 crc kubenswrapper[4690]: E0320 13:24:43.416036 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.414228 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:44 crc kubenswrapper[4690]: E0320 13:24:44.414433 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.433999 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cd
ef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.447380 4690 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000e4968-0075-40bd-bce9-d70dd446c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:00Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 13:22:36.739027 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 13:22:36.744583 1 observer_polling.go:159] Starting file observer\\\\nI0320 13:22:36.812096 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 13:22:36.820740 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 13:23:00.576831 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 13:23:00.577039 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:23:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.462491 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.475463 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.489818 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.523798 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.550386 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.569154 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.584683 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: E0320 13:24:44.585270 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.605573 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.616780 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.629566 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.648868 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2db
aa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.660738 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.674108 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.699329 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\
\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"
cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:44 crc kubenswrapper[4690]: I0320 13:24:44.718001 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:44Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.413737 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.413803 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.413737 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.414022 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.414204 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.414308 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.437190 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.437258 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.437282 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.437309 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.437333 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:45Z","lastTransitionTime":"2026-03-20T13:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.458677 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:45Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.464399 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.464456 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.464467 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.464486 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.464499 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:45Z","lastTransitionTime":"2026-03-20T13:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.485082 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:45Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.490545 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.490591 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.490609 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.490633 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.490651 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:45Z","lastTransitionTime":"2026-03-20T13:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.511647 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:45Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.518329 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.518644 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.518786 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.518981 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.519134 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:45Z","lastTransitionTime":"2026-03-20T13:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.538211 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:45Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.543731 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.544014 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.544169 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.544317 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:45 crc kubenswrapper[4690]: I0320 13:24:45.544468 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:45Z","lastTransitionTime":"2026-03-20T13:24:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.563694 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:45Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:45 crc kubenswrapper[4690]: E0320 13:24:45.563958 4690 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:24:46 crc kubenswrapper[4690]: I0320 13:24:46.413580 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:46 crc kubenswrapper[4690]: E0320 13:24:46.413784 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:47 crc kubenswrapper[4690]: I0320 13:24:47.413424 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:47 crc kubenswrapper[4690]: I0320 13:24:47.413444 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:47 crc kubenswrapper[4690]: E0320 13:24:47.414384 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:47 crc kubenswrapper[4690]: E0320 13:24:47.414511 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:47 crc kubenswrapper[4690]: I0320 13:24:47.413523 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:47 crc kubenswrapper[4690]: E0320 13:24:47.414604 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:48 crc kubenswrapper[4690]: I0320 13:24:48.413893 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:48 crc kubenswrapper[4690]: E0320 13:24:48.414045 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:49 crc kubenswrapper[4690]: I0320 13:24:49.414181 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:49 crc kubenswrapper[4690]: I0320 13:24:49.414206 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:49 crc kubenswrapper[4690]: I0320 13:24:49.414336 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:49 crc kubenswrapper[4690]: E0320 13:24:49.414490 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:49 crc kubenswrapper[4690]: E0320 13:24:49.414829 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:49 crc kubenswrapper[4690]: E0320 13:24:49.415167 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:49 crc kubenswrapper[4690]: E0320 13:24:49.587094 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:24:50 crc kubenswrapper[4690]: I0320 13:24:50.414266 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:50 crc kubenswrapper[4690]: E0320 13:24:50.414565 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.092307 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/0.log" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.092371 4690 generic.go:334] "Generic (PLEG): container finished" podID="d83a0d76-2d76-4202-a2f1-42b9ccb66802" containerID="574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a" exitCode=1 Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.092417 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgtf4" event={"ID":"d83a0d76-2d76-4202-a2f1-42b9ccb66802","Type":"ContainerDied","Data":"574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a"} Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.092967 4690 scope.go:117] "RemoveContainer" containerID="574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.108258 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/h
ost/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.120525 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:51Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:50Z\\\",\\\"message\\\":\\\"2026-03-20T13:24:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861\\\\n2026-03-20T13:24:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861 to /host/opt/cni/bin/\\\\n2026-03-20T13:24:05Z [verbose] multus-daemon started\\\\n2026-03-20T13:24:05Z [verbose] Readiness Indicator file check\\\\n2026-03-20T13:24:50Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.132240 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000e4968-0075-40bd-bce9-d70dd446c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:00Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 13:22:36.739027 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 13:22:36.744583 1 observer_polling.go:159] Starting file observer\\\\nI0320 13:22:36.812096 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 13:22:36.820740 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 13:23:00.576831 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 13:23:00.577039 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:23:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.146054 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.161923 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.174712 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.187762 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.199731 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.221974 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.235584 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.252787 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b
8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.265570 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.276145 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.287598 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.298140 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.324017 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.335158 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:51Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.413989 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:51 crc kubenswrapper[4690]: E0320 13:24:51.414096 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.414245 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:51 crc kubenswrapper[4690]: E0320 13:24:51.414290 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:51 crc kubenswrapper[4690]: I0320 13:24:51.414381 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:51 crc kubenswrapper[4690]: E0320 13:24:51.414422 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.099454 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/0.log" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.099573 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgtf4" event={"ID":"d83a0d76-2d76-4202-a2f1-42b9ccb66802","Type":"ContainerStarted","Data":"95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42"} Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.119489 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.139464 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.157316 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.179399 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:50Z\\\",\\\"message\\\":\\\"2026-03-20T13:24:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861\\\\n2026-03-20T13:24:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861 to /host/opt/cni/bin/\\\\n2026-03-20T13:24:05Z [verbose] multus-daemon started\\\\n2026-03-20T13:24:05Z [verbose] Readiness Indicator file check\\\\n2026-03-20T13:24:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.200227 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000e4968-0075-40bd-bce9-d70dd446c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:00Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 13:22:36.739027 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 13:22:36.744583 1 observer_polling.go:159] Starting file observer\\\\nI0320 13:22:36.812096 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 13:22:36.820740 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 13:23:00.576831 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 13:23:00.577039 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:23:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.221097 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.242954 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.263565 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.281920 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.299677 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.332165 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2db
aa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.349486 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.372090 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b
8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.391628 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.413711 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.413839 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:52 crc kubenswrapper[4690]: E0320 13:24:52.414042 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.426829 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.426929 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Mar 20 13:24:52 crc kubenswrapper[4690]: I0320 13:24:52.443723 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\
\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:52Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:53 crc kubenswrapper[4690]: I0320 13:24:53.413390 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:53 crc kubenswrapper[4690]: I0320 13:24:53.413453 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:53 crc kubenswrapper[4690]: E0320 13:24:53.413613 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:53 crc kubenswrapper[4690]: E0320 13:24:53.413701 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:53 crc kubenswrapper[4690]: I0320 13:24:53.413453 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:53 crc kubenswrapper[4690]: E0320 13:24:53.413784 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.414501 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:54 crc kubenswrapper[4690]: E0320 13:24:54.414772 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.442195 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"
recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257
453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.459884 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.476487 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb80782f-3384-4b4d-be7e-f28f00f68403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37231baa7335f4a160515c92702922b5c3e33d30b11b67edd146d015edc2a199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"
podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.497674 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.517280 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.528912 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.546330 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.564013 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.585681 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: E0320 13:24:54.587900 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.603222 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.616648 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reaso
n\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.630690 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:50Z\\\",\\\"message\\\":\\\"2026-03-20T13:24:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861\\\\n2026-03-20T13:24:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861 to /host/opt/cni/bin/\\\\n2026-03-20T13:24:05Z [verbose] multus-daemon started\\\\n2026-03-20T13:24:05Z [verbose] Readiness Indicator file check\\\\n2026-03-20T13:24:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.645331 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000e4968-0075-40bd-bce9-d70dd446c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:00Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 13:22:36.739027 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 13:22:36.744583 1 observer_polling.go:159] Starting file observer\\\\nI0320 13:22:36.812096 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 13:22:36.820740 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 13:23:00.576831 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 13:23:00.577039 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:23:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.658774 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.672145 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.684984 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.703030 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:54 crc kubenswrapper[4690]: I0320 13:24:54.719822 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:54Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.414062 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.414240 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.414260 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.414443 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.414534 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.414644 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.429957 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.643169 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.643215 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.643223 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.643240 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.643252 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:55Z","lastTransitionTime":"2026-03-20T13:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.657686 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:55Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.662241 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.662296 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.662308 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.662332 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.662346 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:55Z","lastTransitionTime":"2026-03-20T13:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.680998 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:55Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.685096 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.685164 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.685181 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.685207 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.685225 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:55Z","lastTransitionTime":"2026-03-20T13:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.700451 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:55Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.704128 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.704196 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.704219 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.704247 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.704267 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:55Z","lastTransitionTime":"2026-03-20T13:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.724580 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:55Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.728463 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.728493 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.728503 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.728519 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:24:55 crc kubenswrapper[4690]: I0320 13:24:55.728527 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:24:55Z","lastTransitionTime":"2026-03-20T13:24:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.740236 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:55Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:55 crc kubenswrapper[4690]: E0320 13:24:55.740642 4690 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:24:56 crc kubenswrapper[4690]: I0320 13:24:56.414257 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:56 crc kubenswrapper[4690]: E0320 13:24:56.414632 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:56 crc kubenswrapper[4690]: I0320 13:24:56.415099 4690 scope.go:117] "RemoveContainer" containerID="8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.118350 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/2.log" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.120965 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.121505 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.147961 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c947
43d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.163289 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.179467 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.193107 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.209771 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.228062 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.246348 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.262798 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:50Z\\\",\\\"message\\\":\\\"2026-03-20T13:24:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861\\\\n2026-03-20T13:24:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861 to /host/opt/cni/bin/\\\\n2026-03-20T13:24:05Z [verbose] multus-daemon started\\\\n2026-03-20T13:24:05Z [verbose] Readiness Indicator file check\\\\n2026-03-20T13:24:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.280813 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000e4968-0075-40bd-bce9-d70dd446c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:00Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 13:22:36.739027 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 13:22:36.744583 1 observer_polling.go:159] Starting file observer\\\\nI0320 13:22:36.812096 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 13:22:36.820740 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 13:23:00.576831 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 13:23:00.577039 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:23:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.299212 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.316596 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5925f9c0-691d-4b83-910a-6234888e848c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bb4e5a138b082a96715b6e5c220e87020b4480a60f63c9c79669a0cce317fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7157d887bc71eb3e9b9d3a40054646b9b47f64a296d7a63890bcef190e26fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5112fb8ae9a44756acabacd15d9f1c29b5b96b39fc8ef42f488c15058fc5679f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.334139 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.349363 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.360305 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.379972 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4791b054e8fa3d8e6469812cc3cbe7be8c490
dee13c0bbc1698edcf6330b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 
ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":
[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.393252 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.410144 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb80782f-3384-4b4d-be7e-f28f00f68403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37231baa7335f4a160515c92702922b5c3e33d30b11b67edd146d015edc2a199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.414171 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.414235 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.414290 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:57 crc kubenswrapper[4690]: E0320 13:24:57.414331 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:57 crc kubenswrapper[4690]: E0320 13:24:57.414512 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:57 crc kubenswrapper[4690]: E0320 13:24:57.414589 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.426585 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:57 crc kubenswrapper[4690]: I0320 13:24:57.441739 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:57Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.127342 4690 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/3.log" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.128161 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/2.log" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.131504 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" exitCode=1 Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.131543 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.131946 4690 scope.go:117] "RemoveContainer" containerID="8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.134377 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:24:58 crc kubenswrapper[4690]: E0320 13:24:58.134892 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.153601 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.170489 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.188478 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.201012 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.217813 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:50Z\\\",\\\"message\\\":\\\"2026-03-20T13:24:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861\\\\n2026-03-20T13:24:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861 to /host/opt/cni/bin/\\\\n2026-03-20T13:24:05Z [verbose] multus-daemon started\\\\n2026-03-20T13:24:05Z [verbose] Readiness Indicator file check\\\\n2026-03-20T13:24:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.230271 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"000e4968-0075-40bd-bce9-d70dd446c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:00Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 13:22:36.739027 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 13:22:36.744583 1 observer_polling.go:159] Starting file observer\\\\nI0320 13:22:36.812096 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 13:22:36.820740 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 13:23:00.576831 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 13:23:00.577039 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:23:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.242873 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.253991 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5925f9c0-691d-4b83-910a-6234888e848c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bb4e5a138b082a96715b6e5c220e87020b4480a60f63c9c79669a0cce317fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7157d887bc71eb3e9b9d3a40054646b9b47f64a296d7a63890bcef190e26fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5112fb8ae9a44756acabacd15d9f1c29b5b96b39fc8ef42f488c15058fc5679f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.267682 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.281144 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.294366 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.320120 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4791b054e8fa3d8e6469812cc3cbe7be8c490
dee13c0bbc1698edcf6330b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8588f021420a98c02413905710fac84131cdd2dbaa38654ae7e311732e9bc061\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:30Z\\\",\\\"message\\\":\\\"712973235162149816) with []\\\\nI0320 13:24:30.331752 6854 address_set.go:302] New(aa6fc2dc-fab0-4812-b9da-809058e4dcf7/default-network-controller:EgressIP:egressip-served-pods:v4:default/a8519615025667110816) with []\\\\nI0320 13:24:30.331799 6854 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0320 13:24:30.332136 6854 factory.go:1336] Added *v1.Node event handler 7\\\\nI0320 13:24:30.332295 6854 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0320 13:24:30.333039 6854 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0320 13:24:30.333349 6854 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0320 13:24:30.333411 6854 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0320 13:24:30.333426 6854 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0320 13:24:30.333521 6854 factory.go:656] Stopping watch factory\\\\nI0320 13:24:30.333554 6854 ovnkube.go:599] Stopped ovnkube\\\\nI0320 13:24:30.333592 6854 handler.go:208] Removed *v1.Node event handler 2\\\\nI0320 13:24:30.333615 6854 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0320 13:24:30.333629 6854 handler.go:208] Removed *v1.Node event handler 7\\\\nF0320 13:24:30.333724 6854 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:57Z\\\",\\\"message\\\":\\\"ervices.Addr{IP:\\\\\\\"10.217.5.214\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0320 13:24:57.320055 7174 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0320 13:24:57.319718 7174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network 
controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certif\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.1
1\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.331744 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.346229 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb80782f-3384-4b4d-be7e-f28f00f68403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37231baa7335f4a160515c92702922b5c3e33d30b11b67edd146d015edc2a199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.365923 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.379310 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.404132 4690 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.414164 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:24:58 crc kubenswrapper[4690]: E0320 13:24:58.414306 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.416289 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:58 crc kubenswrapper[4690]: I0320 13:24:58.433417 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\
\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85
f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:58Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.135966 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/3.log" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.140127 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:24:59 crc kubenswrapper[4690]: E0320 13:24:59.140266 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.151811 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb80782f-3384-4b4d-be7e-f28f00f68403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37231baa7335f4a160515c92702922b5c3e33d30b11b67edd146d015edc2a199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containe
rID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.165094 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\
\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints 
registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.178964 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5925f9c0-691d-4b83-910a-6234888e848c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bb4e5a138b082a96715b6e5c220e87020b4480a60f63c9c79669a0cce317fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7157d887bc71eb3e9b9d3a40054646b9b47f64a296d7a63890bcef190e26fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5112fb8ae9a44756acabacd15d9f1c29b5b96b39fc8ef42f488c15058fc5679f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.198219 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.216824 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.231224 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.261524 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4791b054e8fa3d8e6469812cc3cbe7be8c490
dee13c0bbc1698edcf6330b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:57Z\\\",\\\"message\\\":\\\"ervices.Addr{IP:\\\\\\\"10.217.5.214\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0320 13:24:57.320055 7174 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0320 13:24:57.319718 7174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certif\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.273197 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.291045 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.320079 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\
\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"
cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.338738 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 
13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.358468 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.379209 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000e4968-0075-40bd-bce9-d70dd446c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:00Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 13:22:36.739027 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 13:22:36.744583 1 observer_polling.go:159] Starting file observer\\\\nI0320 13:22:36.812096 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 13:22:36.820740 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 13:23:00.576831 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 13:23:00.577039 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:23:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.396493 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.413591 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.413635 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:24:59 crc kubenswrapper[4690]: E0320 13:24:59.413804 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:24:59 crc kubenswrapper[4690]: E0320 13:24:59.413928 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.414046 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:24:59 crc kubenswrapper[4690]: E0320 13:24:59.414254 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.416576 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.429876 4690 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.446352 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.458957 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: I0320 13:24:59.500938 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:50Z\\\",\\\"message\\\":\\\"2026-03-20T13:24:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861\\\\n2026-03-20T13:24:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861 to /host/opt/cni/bin/\\\\n2026-03-20T13:24:05Z [verbose] multus-daemon started\\\\n2026-03-20T13:24:05Z [verbose] Readiness Indicator file check\\\\n2026-03-20T13:24:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:24:59Z is after 2025-08-24T17:21:41Z" Mar 20 13:24:59 crc kubenswrapper[4690]: E0320 13:24:59.589722 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:00 crc kubenswrapper[4690]: I0320 13:25:00.414453 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:00 crc kubenswrapper[4690]: E0320 13:25:00.414718 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:01 crc kubenswrapper[4690]: I0320 13:25:01.414537 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:01 crc kubenswrapper[4690]: I0320 13:25:01.414601 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:01 crc kubenswrapper[4690]: I0320 13:25:01.414643 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:01 crc kubenswrapper[4690]: E0320 13:25:01.414796 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:01 crc kubenswrapper[4690]: E0320 13:25:01.414960 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:01 crc kubenswrapper[4690]: E0320 13:25:01.415100 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:02 crc kubenswrapper[4690]: I0320 13:25:02.414446 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:02 crc kubenswrapper[4690]: E0320 13:25:02.414659 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:03 crc kubenswrapper[4690]: I0320 13:25:03.414416 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:03 crc kubenswrapper[4690]: I0320 13:25:03.414519 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:03 crc kubenswrapper[4690]: I0320 13:25:03.414418 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:03 crc kubenswrapper[4690]: E0320 13:25:03.414725 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:03 crc kubenswrapper[4690]: E0320 13:25:03.414981 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:03 crc kubenswrapper[4690]: E0320 13:25:03.415220 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.414293 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:04 crc kubenswrapper[4690]: E0320 13:25:04.414450 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.430996 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"000e4968-0075-40bd-bce9-d70dd446c7d3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://137ba462df423c3672b46a99fcf109ef7015e562a9148a33af4213a4451a1577\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8e5af2544ceca29da3b7ec1ead4ca43896be750ef985f1475a17ce6cc26c4dc1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:00Z\\\",\\\"message\\\":\\\"+ timeout 3m /bin/bash -exuo pipefail -c 'while [ -n \\\\\\\"$(ss -Htanop \\\\\\\\( sport = 10357 \\\\\\\\))\\\\\\\" ]; do sleep 1; done'\\\\n++ ss -Htanop '(' sport = 10357 ')'\\\\n+ '[' -n '' ']'\\\\n+ exec cluster-policy-controller start --config=/etc/kubernetes/static-pod-resources/configmaps/cluster-policy-controller-config/config.yaml --kubeconfig=/etc/kubernetes/static-pod-resources/configmaps/controller-manager-kubeconfig/kubeconfig --namespace=openshift-kube-controller-manager -v=2\\\\nI0320 13:22:36.739027 1 leaderelection.go:121] The leader election gives 4 retries and allows for 30s of clock skew. The kube-apiserver downtime tolerance is 78s. Worst non-graceful lease acquisition is 2m43s. 
Worst graceful lease acquisition is {26s}.\\\\nI0320 13:22:36.744583 1 observer_polling.go:159] Starting file observer\\\\nI0320 13:22:36.812096 1 builder.go:298] cluster-policy-controller version 4.18.0-202501230001.p0.g5fd8525.assembly.stream.el9-5fd8525-5fd852525909ce6eab52972ba9ce8fcf56528eb9\\\\nI0320 13:22:36.820740 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.crt::/etc/kubernetes/static-pod-resources/secrets/serving-cert/tls.key\\\\\\\"\\\\nI0320 13:23:00.576831 1 cmd.go:138] Received SIGTERM or SIGINT signal, shutting down controller.\\\\nF0320 13:23:00.577039 1 cmd.go:179] failed checking apiserver connectivity: Unauthorized\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:23:00Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://aa821d2408b518728ebb082a1668cab249f0bb2cfb1d9b0d08ad757ab569dbc5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d73770fe767dfbe3ba9d648041de8aebb202eca97f775f5162dd291e39d9cc3\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.442436 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1c65335bd9b81673fc89accc42761cb41a3ccecd7b4f324af5b0040d5f5c17bf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.454905 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e66256e9f949eed96cdc081bfadc87c558f03045d0c26a1ea29557f4d44b7cbe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8a69a45fca09b555428fe2d40a4eb4eba6b364a2c47b58d7c548e64dbc116cee\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.465891 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.476685 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://88a1f257d419337e5564a3381425f73c4d608de8eb9e065c1dd286251ae303eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.485193 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-lgtw8" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a72940bb-614b-417f-9e8b-bcfddae31f96\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eff9e18ee3d2f3df382312c80bd6e82e00f58e6c2c4fdaf081552a857770afd6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-jdt8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-lgtw8\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.495239 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-pgtf4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d83a0d76-2d76-4202-a2f1-42b9ccb66802\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:50Z\\\",\\\"message\\\":\\\"2026-03-20T13:24:05+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861\\\\n2026-03-20T13:24:05+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_da633976-5954-42ec-bc62-637ea730d861 to /host/opt/cni/bin/\\\\n2026-03-20T13:24:05Z [verbose] multus-daemon started\\\\n2026-03-20T13:24:05Z [verbose] Readiness Indicator file check\\\\n2026-03-20T13:24:50Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-55l66\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-pgtf4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.503401 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-hq77p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fba1e50b-81f4-438f-b056-3f8cbee7fad1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://33aee9d3ec2c8ae2012bd73f3946156f47a55b909469f9dc5dc1a24740bcf3c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4l4jb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-hq77p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.511167 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"eb80782f-3384-4b4d-be7e-f28f00f68403\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37231baa7335f4a160515c92702922b5c3e33d30b11b67edd146d015edc2a199\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3edb4a40867aca0f25e34d2d4852cd99265bf1f5c2667b6abae0e1dedd4b9260\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.522359 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"105ebbdb-a98c-4fca-bf5f-667f0090e9c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-03-20T13:23:32Z\\\",\\\"message\\\":\\\"file observer\\\\nW0320 13:23:31.854621 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0320 13:23:31.854798 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0320 13:23:31.855746 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-640963218/tls.crt::/tmp/serving-cert-640963218/tls.key\\\\\\\"\\\\nI0320 13:23:32.356862 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0320 13:23:32.358488 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0320 13:23:32.358542 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0320 13:23:32.358590 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0320 13:23:32.358616 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0320 13:23:32.362820 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW0320 13:23:32.362930 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362955 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0320 13:23:32.362968 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0320 13:23:32.362978 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0320 13:23:32.362987 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0320 13:23:32.362996 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI0320 13:23:32.362830 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF0320 13:23:32.363555 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:23:31Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":4,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.532872 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5925f9c0-691d-4b83-910a-6234888e848c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:23:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://43bb4e5a138b082a96715b6e5c220e87020b4480a60f63c9c79669a0cce317fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d7157d887bc71eb3e9b9d3a40054646b9b47f64a296d7a63890bcef190e26fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5112fb8ae9a44756acabacd15d9f1c29b5b96b39fc8ef42f488c15058fc5679f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://956b20cd012d16fb9344b7c84ad4a378993d1bff7d74ffe428007db4ca43def9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.543930 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.556763 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.566722 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d459decc-f715-4636-bc35-963ae8133ec7\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-5gh6s\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-rpcmp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.587182 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"874238ac-6c4c-40c9-ad22-1bec31020fb6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://adf4791b054e8fa3d8e6469812cc3cbe7be8c490
dee13c0bbc1698edcf6330b9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-03-20T13:24:57Z\\\",\\\"message\\\":\\\"ervices.Addr{IP:\\\\\\\"10.217.5.214\\\\\\\", Port:50051, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI0320 13:24:57.320055 7174 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:c94130be-172c-477c-88c4-40cc7eba30fe}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0320 13:24:57.319718 7174 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certif\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7x88j\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-x2b7f\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: E0320 13:25:04.590168 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.602160 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"60ded650-b298-4115-8286-8969b94d4062\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://98392b52872cba37d5c5cd5bc58a5ed4c8adff48ddddad64c1a63ec1c59adfe0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-frghp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ftcqx\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.626944 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"cd4688c5-c244-4b3f-a461-86c777f4f56a\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:56Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:22:34Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://48f89ae539fad7734313ca31e8c507002065a897a12d1af33c3131a4420fa74f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0fc1f3c698ee1bbb8f809d6fd9c6e6cc7e725f952a5c94743d7748f9f5b043eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0b86dcff13f7b1d6139226f0ed2f5ff37ec76750c1569589ea610cdd8651e11c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"start
edAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1200a4d26afc6325b208e0427c9188d190f84486cea115182766ddfddff998ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://528f119a4e430863b0edbc01e0b9e0204be32f248b51234000d4f15144b97ff4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:22:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://5c213dc735276d61519b42fa355df135fc80c0ab77434867bd338d8ecbee3c28\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:35Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f322cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6ce976c55e75ddc8039f3
22cee085b7f03db27f81c7b87b723e1cf586ac19ca4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:36Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:36Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb82d51152e8ad4bafc2d5d0c5afc0bed4e5339e040d66e6a627beeb1f922cc6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:22:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:22:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:22:34Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.638366 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"372b2734-220f-4e91-98c1-dbb9d1042273\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://89d01ad377993d8a2800e0bdf4e26de46d02a3469f7f937ca764a54b3da2f933\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f47b1a7b417bf597223e702c6ec7cd3a8b9ebb7b374b6954dcb65178d10f080f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-l74sw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-qnv4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 
13:25:04 crc kubenswrapper[4690]: I0320 13:25:04.654497 4690 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"606df6ba-3dfe-48de-8890-9a5a0c030d23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-03-20T13:24:10Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4c3f6019f27ec16b83c45a6e4be19f0e01508ea97d9283faf15c8a5f8f29447d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-03-20T13:24:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7b9b907a748a45ba3131f2e873c86d0705ce4693bc7ea4ae21c412b69218bc33\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e6cdef4cbf0c688bbbbb36a12d931f2c33c496842db06eb66d8b6531b563f338\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9e904258fa6e9f555996d83b6093f2e4b1d59a983749ca90f0f16f24ea7d331c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:06Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4c9df33b3a797fa5f4270daa42fef1629b3f40a9d85f3f690645859082740ca3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:07Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://26c529584d4c5da34c7f7b3f2ed58a5c93d7a83f2d2bc5af4f8bc700eac6df5b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7f5b62eaedaa779440ee2f55f013c3378587c47f5fa2080bcf08621233311004\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-03-20T13:24:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-03-20T13:24:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-7h2w5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-03-20T13:24:03Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-t4t2r\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:04Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:05 crc kubenswrapper[4690]: I0320 13:25:05.413768 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:05 crc kubenswrapper[4690]: I0320 13:25:05.413869 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:05 crc kubenswrapper[4690]: I0320 13:25:05.413949 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:05 crc kubenswrapper[4690]: E0320 13:25:05.414109 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:05 crc kubenswrapper[4690]: E0320 13:25:05.414285 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:05 crc kubenswrapper[4690]: E0320 13:25:05.414365 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.053270 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.053314 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.053325 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.053343 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.053355 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:25:06Z","lastTransitionTime":"2026-03-20T13:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Mar 20 13:25:06 crc kubenswrapper[4690]: E0320 13:25:06.074954 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.079364 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.079405 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.079414 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.079430 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.079440 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:25:06Z","lastTransitionTime":"2026-03-20T13:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:25:06 crc kubenswrapper[4690]: E0320 13:25:06.095358 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.099344 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.099378 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.099389 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.099406 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.099418 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:25:06Z","lastTransitionTime":"2026-03-20T13:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:25:06 crc kubenswrapper[4690]: E0320 13:25:06.113147 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.119117 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.119169 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.119184 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.119206 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.119222 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:25:06Z","lastTransitionTime":"2026-03-20T13:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:25:06 crc kubenswrapper[4690]: E0320 13:25:06.133477 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.137066 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.137130 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.137154 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.137182 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.137202 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:25:06Z","lastTransitionTime":"2026-03-20T13:25:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:25:06 crc kubenswrapper[4690]: E0320 13:25:06.148764 4690 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404564Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865364Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-03-20T13:25:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"64cb4bde-531a-46e3-b83b-f3ca53756a20\\\",\\\"systemUUID\\\":\\\"0709f114-e447-44a1-aacc-6ba4cd210e43\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-03-20T13:25:06Z is after 2025-08-24T17:21:41Z" Mar 20 13:25:06 crc kubenswrapper[4690]: E0320 13:25:06.148906 4690 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Mar 20 13:25:06 crc kubenswrapper[4690]: I0320 13:25:06.414181 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:06 crc kubenswrapper[4690]: E0320 13:25:06.414357 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.307421 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.307715 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:11.307679934 +0000 UTC m=+217.597279927 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.308177 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.308232 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.308296 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.308360 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:07 crc 
kubenswrapper[4690]: E0320 13:25:07.308431 4690 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308513 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308531 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308555 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308560 4690 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308575 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:26:11.308539098 +0000 UTC m=+217.598139101 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308590 4690 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308624 4690 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308585 4690 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308672 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-03-20 13:26:11.308649411 +0000 UTC m=+217.598249454 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308819 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-03-20 13:26:11.308794995 +0000 UTC m=+217.598394978 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.308841 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-03-20 13:26:11.308829526 +0000 UTC m=+217.598429509 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.409054 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.409219 4690 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.409312 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs podName:d459decc-f715-4636-bc35-963ae8133ec7 nodeName:}" failed. No retries permitted until 2026-03-20 13:26:11.409295015 +0000 UTC m=+217.698894958 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs") pod "network-metrics-daemon-rpcmp" (UID: "d459decc-f715-4636-bc35-963ae8133ec7") : object "openshift-multus"/"metrics-daemon-secret" not registered Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.413921 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.414042 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.414198 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:07 crc kubenswrapper[4690]: I0320 13:25:07.414411 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.414495 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:07 crc kubenswrapper[4690]: E0320 13:25:07.414882 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:08 crc kubenswrapper[4690]: I0320 13:25:08.413879 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:08 crc kubenswrapper[4690]: E0320 13:25:08.414082 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:09 crc kubenswrapper[4690]: I0320 13:25:09.413762 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:09 crc kubenswrapper[4690]: I0320 13:25:09.413767 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:09 crc kubenswrapper[4690]: I0320 13:25:09.413796 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:09 crc kubenswrapper[4690]: E0320 13:25:09.414136 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:09 crc kubenswrapper[4690]: E0320 13:25:09.414285 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:09 crc kubenswrapper[4690]: E0320 13:25:09.414424 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:09 crc kubenswrapper[4690]: E0320 13:25:09.591810 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:10 crc kubenswrapper[4690]: I0320 13:25:10.414181 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:10 crc kubenswrapper[4690]: E0320 13:25:10.414421 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:10 crc kubenswrapper[4690]: I0320 13:25:10.417170 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:25:10 crc kubenswrapper[4690]: E0320 13:25:10.417543 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:25:11 crc kubenswrapper[4690]: I0320 13:25:11.414061 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:11 crc kubenswrapper[4690]: I0320 13:25:11.414123 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:11 crc kubenswrapper[4690]: E0320 13:25:11.414283 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:11 crc kubenswrapper[4690]: I0320 13:25:11.414064 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:11 crc kubenswrapper[4690]: E0320 13:25:11.414499 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:11 crc kubenswrapper[4690]: E0320 13:25:11.414935 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:12 crc kubenswrapper[4690]: I0320 13:25:12.414537 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:12 crc kubenswrapper[4690]: E0320 13:25:12.414988 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:13 crc kubenswrapper[4690]: I0320 13:25:13.413731 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:13 crc kubenswrapper[4690]: I0320 13:25:13.413806 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:13 crc kubenswrapper[4690]: E0320 13:25:13.413927 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:13 crc kubenswrapper[4690]: I0320 13:25:13.413827 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:13 crc kubenswrapper[4690]: E0320 13:25:13.413982 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:13 crc kubenswrapper[4690]: E0320 13:25:13.414093 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.414322 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:14 crc kubenswrapper[4690]: E0320 13:25:14.414495 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.482625 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=58.482597592 podStartE2EDuration="58.482597592s" podCreationTimestamp="2026-03-20 13:24:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.466328711 +0000 UTC m=+160.755928664" watchObservedRunningTime="2026-03-20 13:25:14.482597592 +0000 UTC m=+160.772197545" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.517340 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-t4t2r" podStartSLOduration=112.517312291 podStartE2EDuration="1m52.517312291s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.516767566 +0000 UTC m=+160.806367559" watchObservedRunningTime="2026-03-20 13:25:14.517312291 +0000 UTC m=+160.806912294" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.517825 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-qnv4x" podStartSLOduration=112.517778304 podStartE2EDuration="1m52.517778304s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.48329435 +0000 UTC m=+160.772894303" watchObservedRunningTime="2026-03-20 13:25:14.517778304 +0000 UTC m=+160.807378287" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.557726 4690 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-lgtw8" podStartSLOduration=112.557700024 podStartE2EDuration="1m52.557700024s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.557009805 +0000 UTC m=+160.846609768" watchObservedRunningTime="2026-03-20 13:25:14.557700024 +0000 UTC m=+160.847299987" Mar 20 13:25:14 crc kubenswrapper[4690]: E0320 13:25:14.592711 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.592891 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-pgtf4" podStartSLOduration=112.592826855 podStartE2EDuration="1m52.592826855s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.574716825 +0000 UTC m=+160.864316798" watchObservedRunningTime="2026-03-20 13:25:14.592826855 +0000 UTC m=+160.882426808" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.611490 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=36.6114708 podStartE2EDuration="36.6114708s" podCreationTimestamp="2026-03-20 13:24:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.59708534 +0000 UTC m=+160.886685283" watchObservedRunningTime="2026-03-20 13:25:14.6114708 +0000 UTC m=+160.901070753" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.702034 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-hq77p" podStartSLOduration=112.7020133 podStartE2EDuration="1m52.7020133s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.701008953 +0000 UTC m=+160.990608896" watchObservedRunningTime="2026-03-20 13:25:14.7020133 +0000 UTC m=+160.991613253" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.727615 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=57.727595042 podStartE2EDuration="57.727595042s" podCreationTimestamp="2026-03-20 13:24:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.727070818 +0000 UTC m=+161.016670761" watchObservedRunningTime="2026-03-20 13:25:14.727595042 +0000 UTC m=+161.017194985" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.727746 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=22.727741976 podStartE2EDuration="22.727741976s" podCreationTimestamp="2026-03-20 13:24:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-03-20 13:25:14.710773437 +0000 UTC m=+161.000373380" watchObservedRunningTime="2026-03-20 13:25:14.727741976 +0000 UTC m=+161.017341919" Mar 20 13:25:14 crc kubenswrapper[4690]: I0320 13:25:14.753126 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=19.753107273 podStartE2EDuration="19.753107273s" podCreationTimestamp="2026-03-20 13:24:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.738755004 +0000 UTC m=+161.028354947" watchObservedRunningTime="2026-03-20 13:25:14.753107273 +0000 UTC m=+161.042707296" Mar 20 13:25:15 crc kubenswrapper[4690]: I0320 13:25:15.414032 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:15 crc kubenswrapper[4690]: I0320 13:25:15.414061 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:15 crc kubenswrapper[4690]: I0320 13:25:15.414086 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:15 crc kubenswrapper[4690]: E0320 13:25:15.414168 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:15 crc kubenswrapper[4690]: E0320 13:25:15.414282 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:15 crc kubenswrapper[4690]: E0320 13:25:15.414483 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.154568 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.154629 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.154646 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.154667 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.154684 4690 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-03-20T13:25:16Z","lastTransitionTime":"2026-03-20T13:25:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.214261 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podStartSLOduration=114.214244288 podStartE2EDuration="1m54.214244288s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:14.75298585 +0000 UTC m=+161.042585793" watchObservedRunningTime="2026-03-20 13:25:16.214244288 +0000 UTC m=+162.503844231" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.214576 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq"] Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.214999 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.219672 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.219756 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.220224 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.220474 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.404521 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.404697 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.404789 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.404821 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.404916 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.413666 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:16 crc kubenswrapper[4690]: E0320 13:25:16.413956 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.451526 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.460724 4690 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.505962 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.506134 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.506203 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.506250 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.506265 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.506321 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.506401 4690 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.506976 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-service-ca\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.512654 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.526910 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-hprdq\" (UID: \"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:16 crc kubenswrapper[4690]: I0320 13:25:16.536393 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" Mar 20 13:25:17 crc kubenswrapper[4690]: I0320 13:25:17.207512 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" event={"ID":"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6","Type":"ContainerStarted","Data":"1a27970d564b51d1701a32621fb41403cc5403a21ef815be421d54d2f9081737"} Mar 20 13:25:17 crc kubenswrapper[4690]: I0320 13:25:17.207881 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" event={"ID":"65f1ea33-38ba-42dc-b5d4-7f8f97cf8df6","Type":"ContainerStarted","Data":"212f844598c1526b542c5326245b1cfcda1f6939b6e30ef011781eb9c33cce14"} Mar 20 13:25:17 crc kubenswrapper[4690]: I0320 13:25:17.224970 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-hprdq" podStartSLOduration=115.224945303 podStartE2EDuration="1m55.224945303s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:17.223056642 +0000 UTC m=+163.512656605" watchObservedRunningTime="2026-03-20 13:25:17.224945303 +0000 UTC m=+163.514545276" Mar 20 13:25:17 crc kubenswrapper[4690]: I0320 13:25:17.414086 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:17 crc kubenswrapper[4690]: I0320 13:25:17.414133 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:17 crc kubenswrapper[4690]: I0320 13:25:17.414178 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:17 crc kubenswrapper[4690]: E0320 13:25:17.414227 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:17 crc kubenswrapper[4690]: E0320 13:25:17.414294 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:17 crc kubenswrapper[4690]: E0320 13:25:17.414447 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:18 crc kubenswrapper[4690]: I0320 13:25:18.413900 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:18 crc kubenswrapper[4690]: E0320 13:25:18.414116 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:19 crc kubenswrapper[4690]: I0320 13:25:19.414413 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:19 crc kubenswrapper[4690]: I0320 13:25:19.414473 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:19 crc kubenswrapper[4690]: E0320 13:25:19.414558 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:19 crc kubenswrapper[4690]: I0320 13:25:19.414601 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:19 crc kubenswrapper[4690]: E0320 13:25:19.414819 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:19 crc kubenswrapper[4690]: E0320 13:25:19.414942 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:19 crc kubenswrapper[4690]: E0320 13:25:19.594687 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:20 crc kubenswrapper[4690]: I0320 13:25:20.413684 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:20 crc kubenswrapper[4690]: E0320 13:25:20.413923 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:21 crc kubenswrapper[4690]: I0320 13:25:21.414038 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:21 crc kubenswrapper[4690]: I0320 13:25:21.414130 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:21 crc kubenswrapper[4690]: I0320 13:25:21.414050 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:21 crc kubenswrapper[4690]: E0320 13:25:21.414216 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:21 crc kubenswrapper[4690]: E0320 13:25:21.414539 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:21 crc kubenswrapper[4690]: E0320 13:25:21.414655 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:21 crc kubenswrapper[4690]: I0320 13:25:21.414946 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:25:21 crc kubenswrapper[4690]: E0320 13:25:21.415116 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:25:22 crc kubenswrapper[4690]: I0320 13:25:22.414024 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:22 crc kubenswrapper[4690]: E0320 13:25:22.414230 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:23 crc kubenswrapper[4690]: I0320 13:25:23.414430 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:23 crc kubenswrapper[4690]: I0320 13:25:23.414473 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:23 crc kubenswrapper[4690]: E0320 13:25:23.414584 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:23 crc kubenswrapper[4690]: I0320 13:25:23.414610 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:23 crc kubenswrapper[4690]: E0320 13:25:23.414758 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:23 crc kubenswrapper[4690]: E0320 13:25:23.414836 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:24 crc kubenswrapper[4690]: I0320 13:25:24.414384 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:24 crc kubenswrapper[4690]: E0320 13:25:24.417109 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:24 crc kubenswrapper[4690]: E0320 13:25:24.595357 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:25 crc kubenswrapper[4690]: I0320 13:25:25.414150 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:25 crc kubenswrapper[4690]: E0320 13:25:25.414328 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:25 crc kubenswrapper[4690]: I0320 13:25:25.414386 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:25 crc kubenswrapper[4690]: I0320 13:25:25.414446 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:25 crc kubenswrapper[4690]: E0320 13:25:25.414547 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:25 crc kubenswrapper[4690]: E0320 13:25:25.414699 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:26 crc kubenswrapper[4690]: I0320 13:25:26.414377 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:26 crc kubenswrapper[4690]: E0320 13:25:26.414563 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:27 crc kubenswrapper[4690]: I0320 13:25:27.413550 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:27 crc kubenswrapper[4690]: I0320 13:25:27.413596 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:27 crc kubenswrapper[4690]: I0320 13:25:27.413555 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:27 crc kubenswrapper[4690]: E0320 13:25:27.413689 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:27 crc kubenswrapper[4690]: E0320 13:25:27.413739 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:27 crc kubenswrapper[4690]: E0320 13:25:27.413784 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:28 crc kubenswrapper[4690]: I0320 13:25:28.414431 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:28 crc kubenswrapper[4690]: E0320 13:25:28.414650 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:29 crc kubenswrapper[4690]: I0320 13:25:29.414406 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:29 crc kubenswrapper[4690]: I0320 13:25:29.414447 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:29 crc kubenswrapper[4690]: I0320 13:25:29.414486 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:29 crc kubenswrapper[4690]: E0320 13:25:29.414612 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:29 crc kubenswrapper[4690]: E0320 13:25:29.414932 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:29 crc kubenswrapper[4690]: E0320 13:25:29.414796 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:29 crc kubenswrapper[4690]: E0320 13:25:29.596913 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:30 crc kubenswrapper[4690]: I0320 13:25:30.414080 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:30 crc kubenswrapper[4690]: E0320 13:25:30.414263 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:31 crc kubenswrapper[4690]: I0320 13:25:31.414397 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:31 crc kubenswrapper[4690]: I0320 13:25:31.414415 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:31 crc kubenswrapper[4690]: E0320 13:25:31.415349 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:31 crc kubenswrapper[4690]: E0320 13:25:31.415403 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:31 crc kubenswrapper[4690]: I0320 13:25:31.414534 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:31 crc kubenswrapper[4690]: E0320 13:25:31.415475 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:32 crc kubenswrapper[4690]: I0320 13:25:32.413529 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:32 crc kubenswrapper[4690]: E0320 13:25:32.414157 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:32 crc kubenswrapper[4690]: I0320 13:25:32.414706 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:25:32 crc kubenswrapper[4690]: E0320 13:25:32.414993 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-x2b7f_openshift-ovn-kubernetes(874238ac-6c4c-40c9-ad22-1bec31020fb6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" Mar 20 13:25:33 crc kubenswrapper[4690]: I0320 13:25:33.414376 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:33 crc kubenswrapper[4690]: I0320 13:25:33.414450 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:33 crc kubenswrapper[4690]: I0320 13:25:33.414376 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:33 crc kubenswrapper[4690]: E0320 13:25:33.414569 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:33 crc kubenswrapper[4690]: E0320 13:25:33.414496 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:33 crc kubenswrapper[4690]: E0320 13:25:33.414672 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:34 crc kubenswrapper[4690]: I0320 13:25:34.415863 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:34 crc kubenswrapper[4690]: E0320 13:25:34.415983 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:34 crc kubenswrapper[4690]: E0320 13:25:34.597822 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:35 crc kubenswrapper[4690]: I0320 13:25:35.413585 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:35 crc kubenswrapper[4690]: I0320 13:25:35.413680 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:35 crc kubenswrapper[4690]: E0320 13:25:35.413746 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:35 crc kubenswrapper[4690]: I0320 13:25:35.413704 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:35 crc kubenswrapper[4690]: E0320 13:25:35.413975 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:35 crc kubenswrapper[4690]: E0320 13:25:35.414174 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:36 crc kubenswrapper[4690]: I0320 13:25:36.413501 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:36 crc kubenswrapper[4690]: E0320 13:25:36.413626 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.276000 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/1.log" Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.276760 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/0.log" Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.276860 4690 generic.go:334] "Generic (PLEG): container finished" podID="d83a0d76-2d76-4202-a2f1-42b9ccb66802" containerID="95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42" exitCode=1 Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.276898 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgtf4" event={"ID":"d83a0d76-2d76-4202-a2f1-42b9ccb66802","Type":"ContainerDied","Data":"95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42"} Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.276934 4690 scope.go:117] "RemoveContainer" containerID="574161de880d62df12108d6d5aafc23ff9152def9ab8a0e2d2f81e0be9995d9a" Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.277308 4690 scope.go:117] "RemoveContainer" containerID="95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42" Mar 20 13:25:37 crc kubenswrapper[4690]: E0320 13:25:37.277470 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-pgtf4_openshift-multus(d83a0d76-2d76-4202-a2f1-42b9ccb66802)\"" pod="openshift-multus/multus-pgtf4" podUID="d83a0d76-2d76-4202-a2f1-42b9ccb66802" Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.413956 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.414023 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:37 crc kubenswrapper[4690]: I0320 13:25:37.414044 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:37 crc kubenswrapper[4690]: E0320 13:25:37.414095 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:37 crc kubenswrapper[4690]: E0320 13:25:37.414161 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:37 crc kubenswrapper[4690]: E0320 13:25:37.414227 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:38 crc kubenswrapper[4690]: I0320 13:25:38.282237 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/1.log" Mar 20 13:25:38 crc kubenswrapper[4690]: I0320 13:25:38.414288 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:38 crc kubenswrapper[4690]: E0320 13:25:38.414475 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:39 crc kubenswrapper[4690]: I0320 13:25:39.414153 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:39 crc kubenswrapper[4690]: I0320 13:25:39.414168 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:39 crc kubenswrapper[4690]: E0320 13:25:39.414315 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:39 crc kubenswrapper[4690]: E0320 13:25:39.414382 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:39 crc kubenswrapper[4690]: I0320 13:25:39.414182 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:39 crc kubenswrapper[4690]: E0320 13:25:39.414464 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:39 crc kubenswrapper[4690]: E0320 13:25:39.599424 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:40 crc kubenswrapper[4690]: I0320 13:25:40.414319 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:40 crc kubenswrapper[4690]: E0320 13:25:40.414457 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:41 crc kubenswrapper[4690]: I0320 13:25:41.413399 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:41 crc kubenswrapper[4690]: I0320 13:25:41.413460 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:41 crc kubenswrapper[4690]: I0320 13:25:41.413476 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:41 crc kubenswrapper[4690]: E0320 13:25:41.413568 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:41 crc kubenswrapper[4690]: E0320 13:25:41.413726 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:41 crc kubenswrapper[4690]: E0320 13:25:41.413835 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:42 crc kubenswrapper[4690]: I0320 13:25:42.413996 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:42 crc kubenswrapper[4690]: E0320 13:25:42.414240 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:43 crc kubenswrapper[4690]: I0320 13:25:43.413676 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:43 crc kubenswrapper[4690]: E0320 13:25:43.413809 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:43 crc kubenswrapper[4690]: I0320 13:25:43.414056 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:43 crc kubenswrapper[4690]: E0320 13:25:43.414118 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:43 crc kubenswrapper[4690]: I0320 13:25:43.414257 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:43 crc kubenswrapper[4690]: E0320 13:25:43.414751 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:44 crc kubenswrapper[4690]: I0320 13:25:44.413954 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:44 crc kubenswrapper[4690]: E0320 13:25:44.415953 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:44 crc kubenswrapper[4690]: I0320 13:25:44.417501 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:25:44 crc kubenswrapper[4690]: E0320 13:25:44.600725 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.303654 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/3.log" Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.307194 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerStarted","Data":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.307603 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.348042 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podStartSLOduration=143.348017048 podStartE2EDuration="2m23.348017048s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:25:45.346819966 +0000 UTC m=+191.636419919" watchObservedRunningTime="2026-03-20 13:25:45.348017048 +0000 UTC m=+191.637617031" Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.393882 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rpcmp"] Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.393979 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:45 crc kubenswrapper[4690]: E0320 13:25:45.394074 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.413886 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.413972 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:45 crc kubenswrapper[4690]: E0320 13:25:45.414020 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:45 crc kubenswrapper[4690]: I0320 13:25:45.413910 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:45 crc kubenswrapper[4690]: E0320 13:25:45.414165 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:45 crc kubenswrapper[4690]: E0320 13:25:45.414242 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:47 crc kubenswrapper[4690]: I0320 13:25:47.414266 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:47 crc kubenswrapper[4690]: I0320 13:25:47.414402 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:47 crc kubenswrapper[4690]: E0320 13:25:47.414424 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:47 crc kubenswrapper[4690]: I0320 13:25:47.414439 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:47 crc kubenswrapper[4690]: E0320 13:25:47.414580 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:47 crc kubenswrapper[4690]: E0320 13:25:47.414618 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:47 crc kubenswrapper[4690]: I0320 13:25:47.415069 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:47 crc kubenswrapper[4690]: E0320 13:25:47.415241 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:49 crc kubenswrapper[4690]: I0320 13:25:49.413595 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:49 crc kubenswrapper[4690]: I0320 13:25:49.413666 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:49 crc kubenswrapper[4690]: I0320 13:25:49.413677 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:49 crc kubenswrapper[4690]: I0320 13:25:49.413631 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:49 crc kubenswrapper[4690]: E0320 13:25:49.413826 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:49 crc kubenswrapper[4690]: E0320 13:25:49.413902 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:49 crc kubenswrapper[4690]: E0320 13:25:49.414042 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:49 crc kubenswrapper[4690]: E0320 13:25:49.414260 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:49 crc kubenswrapper[4690]: I0320 13:25:49.414542 4690 scope.go:117] "RemoveContainer" containerID="95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42" Mar 20 13:25:49 crc kubenswrapper[4690]: E0320 13:25:49.601991 4690 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Mar 20 13:25:50 crc kubenswrapper[4690]: I0320 13:25:50.329249 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/1.log" Mar 20 13:25:50 crc kubenswrapper[4690]: I0320 13:25:50.329651 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgtf4" event={"ID":"d83a0d76-2d76-4202-a2f1-42b9ccb66802","Type":"ContainerStarted","Data":"a65e2d6bdf44779e477455b6e6eba4c7902d801329959f6e6286873f8adc0d79"} Mar 20 13:25:51 crc kubenswrapper[4690]: I0320 13:25:51.414022 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:51 crc kubenswrapper[4690]: I0320 13:25:51.414245 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:51 crc kubenswrapper[4690]: E0320 13:25:51.414262 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:51 crc kubenswrapper[4690]: I0320 13:25:51.414318 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:51 crc kubenswrapper[4690]: I0320 13:25:51.414335 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:51 crc kubenswrapper[4690]: E0320 13:25:51.414469 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:51 crc kubenswrapper[4690]: E0320 13:25:51.414621 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:51 crc kubenswrapper[4690]: E0320 13:25:51.414769 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:53 crc kubenswrapper[4690]: I0320 13:25:53.413739 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:53 crc kubenswrapper[4690]: I0320 13:25:53.413798 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:53 crc kubenswrapper[4690]: I0320 13:25:53.413830 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:53 crc kubenswrapper[4690]: I0320 13:25:53.413933 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:53 crc kubenswrapper[4690]: E0320 13:25:53.414125 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-rpcmp" podUID="d459decc-f715-4636-bc35-963ae8133ec7" Mar 20 13:25:53 crc kubenswrapper[4690]: E0320 13:25:53.414201 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Mar 20 13:25:53 crc kubenswrapper[4690]: E0320 13:25:53.414313 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Mar 20 13:25:53 crc kubenswrapper[4690]: E0320 13:25:53.414447 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.414154 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.414173 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.414169 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.414192 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.417434 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.417487 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.417638 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.417674 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.417732 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Mar 20 13:25:55 crc kubenswrapper[4690]: I0320 13:25:55.417863 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.131549 4690 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.179394 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kw6wc"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.180131 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.180147 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.181817 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.182561 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6c8pc"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.187390 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.189708 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.190926 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.191470 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.196537 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rj8zv"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.196928 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.197415 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.197737 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.197835 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.197911 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.197862 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.198012 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.198281 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.199032 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.199982 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.200278 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.201903 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8gsqv"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.202596 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.207724 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.208444 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.211376 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.219462 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.232989 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.233443 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.233689 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.234246 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.234378 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.234607 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.235387 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-7vkfd"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.236222 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7vkfd" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.236993 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.237343 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.237531 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.237739 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.237956 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.238091 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5t8t9"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.238994 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.239059 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.239151 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.239063 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.239349 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.240381 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.241127 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.241331 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.241436 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.241519 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.241593 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.241681 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.241802 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.241956 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.242037 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.242157 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.250783 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.251131 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.251261 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.251413 4690 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.253944 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.254236 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.254247 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.254449 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.254608 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.254677 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.254690 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.254808 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.254907 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.255066 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.255538 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.255658 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.255808 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.255931 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.255997 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.258137 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.258792 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.259531 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fdlk7"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.259964 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.260062 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.260401 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.260560 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.261828 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.262167 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.266572 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-h2jxx"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.271819 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.272099 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.272722 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.272869 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.272912 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.272972 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273079 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-config\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273167 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273196 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fc9761f-215c-4784-9b76-073fae1106a5-config\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273209 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273330 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhwgx\" (UniqueName: \"kubernetes.io/projected/5c8072fb-34b5-4c43-895b-9ccb724b9199-kube-api-access-hhwgx\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273473 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82b92810-2e1d-4ddb-962f-09351da4b297-serving-cert\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273533 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-serving-cert\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273752 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp887\" (UniqueName: \"kubernetes.io/projected/74952b15-473b-462f-a05f-6c00433ed4d5-kube-api-access-mp887\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273775 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0fc9761f-215c-4784-9b76-073fae1106a5-machine-approver-tls\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273790 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-service-ca\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273815 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-console-config\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273828 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-trusted-ca-bundle\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273869 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-config\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.273968 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9ffg\" (UniqueName: \"kubernetes.io/projected/0fc9761f-215c-4784-9b76-073fae1106a5-kube-api-access-l9ffg\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274011 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c8072fb-34b5-4c43-895b-9ccb724b9199-serving-cert\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274018 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274074 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274133 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-service-ca-bundle\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274083 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274162 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-client-ca\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274195 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274232 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-oauth-serving-cert\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274247 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-oauth-config\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274326 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2zdz\" (UniqueName: \"kubernetes.io/projected/82b92810-2e1d-4ddb-962f-09351da4b297-kube-api-access-c2zdz\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.274392 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0fc9761f-215c-4784-9b76-073fae1106a5-auth-proxy-config\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.275176 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.275311 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.275427 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.275636 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.276087 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.276171 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.276243 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.276423 4690 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-controller-manager"/"serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.276653 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.277034 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.280738 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.281023 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.284244 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.285227 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-st5qs"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.285819 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.288034 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.288496 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.288966 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.289987 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.292387 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2xhf5"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.298005 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.299391 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.301296 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.301517 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.301952 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.302519 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.305991 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkvkz"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.307041 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.308119 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.308374 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.318353 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.318620 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.320283 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.320710 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.321480 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.321778 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.321989 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.341984 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.342304 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.344058 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kw6wc"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.344927 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.345771 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.346054 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.346759 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.346902 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.347128 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.348329 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.348898 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.348932 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.349071 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.349084 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.349652 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.351035 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.353843 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.354175 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.354738 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.361302 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.361976 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.361990 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.370300 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.370499 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-kl5sr"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.371079 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rj8zv"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.371169 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.371183 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.373602 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.378479 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.378527 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stm9g\" (UniqueName: \"kubernetes.io/projected/345c7db2-4067-402c-bddf-3a497a9540c2-kube-api-access-stm9g\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.378558 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.378584 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.378609 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjpr6\" (UniqueName: \"kubernetes.io/projected/4874f99f-2938-475f-872a-c7a794ae4818-kube-api-access-qjpr6\") pod \"downloads-7954f5f757-7vkfd\" (UID: \"4874f99f-2938-475f-872a-c7a794ae4818\") " pod="openshift-console/downloads-7954f5f757-7vkfd" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380676 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rdlxz\" (UniqueName: \"kubernetes.io/projected/19d07388-56ad-4bb6-bacb-2eec91c18aa8-kube-api-access-rdlxz\") pod \"control-plane-machine-set-operator-78cbb6b69f-vcp4n\" (UID: \"19d07388-56ad-4bb6-bacb-2eec91c18aa8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380743 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-config\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380771 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380792 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fc9761f-215c-4784-9b76-073fae1106a5-config\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380813 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhwgx\" (UniqueName: \"kubernetes.io/projected/5c8072fb-34b5-4c43-895b-9ccb724b9199-kube-api-access-hhwgx\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380838 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380884 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82b92810-2e1d-4ddb-962f-09351da4b297-serving-cert\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380907 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aae7d681-bfbe-4280-92b9-f117157b6be8-audit-dir\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380930 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-serving-cert\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.380982 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-encryption-config\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.381485 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0fc9761f-215c-4784-9b76-073fae1106a5-config\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.381992 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-config\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382022 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/aae7d681-bfbe-4280-92b9-f117157b6be8-node-pullsecrets\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382074 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382103 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2sz4\" (UniqueName: \"kubernetes.io/projected/4694c58d-e630-4eff-a677-d13aca00fcab-kube-api-access-q2sz4\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382134 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382154 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382171 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1963bdcc-e63d-4227-8f00-8c3900996a30-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gwlgr\" (UID: \"1963bdcc-e63d-4227-8f00-8c3900996a30\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382186 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ccgf\" (UniqueName: \"kubernetes.io/projected/e26c3b04-aca8-46b5-a653-8257d4458d5f-kube-api-access-6ccgf\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: 
\"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382204 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/27c73b63-ed31-4aae-bc66-5b4707f469f5-images\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382221 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5n8vt\" (UniqueName: \"kubernetes.io/projected/27c73b63-ed31-4aae-bc66-5b4707f469f5-kube-api-access-5n8vt\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382240 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-config\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382255 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e26c3b04-aca8-46b5-a653-8257d4458d5f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382277 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e26c3b04-aca8-46b5-a653-8257d4458d5f-proxy-tls\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382292 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-config\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382312 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp887\" (UniqueName: \"kubernetes.io/projected/74952b15-473b-462f-a05f-6c00433ed4d5-kube-api-access-mp887\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382323 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382330 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpmrl\" (UniqueName: \"kubernetes.io/projected/f323c12d-88be-4cc8-908f-adad081907d2-kube-api-access-hpmrl\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382378 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0fc9761f-215c-4784-9b76-073fae1106a5-machine-approver-tls\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382415 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbnlw\" (UniqueName: \"kubernetes.io/projected/4703b44a-9eae-47ce-83bf-e2b66d4b3d91-kube-api-access-xbnlw\") pod \"migrator-59844c95c7-hbdxb\" (UID: \"4703b44a-9eae-47ce-83bf-e2b66d4b3d91\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382433 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-llbhr\" (UniqueName: \"kubernetes.io/projected/6623fd0e-357f-459b-8e04-6da531bf7b7b-kube-api-access-llbhr\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382456 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-service-ca\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382472 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382511 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c230027-5b5a-4c39-8594-c09c36112ab8-serving-cert\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382532 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-config\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " 
pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382553 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d671e62e-b720-47ba-b4eb-e93671f7e327-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382572 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-config\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.382588 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383358 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-service-ca\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383447 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383493 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-serving-cert\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383564 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/19d07388-56ad-4bb6-bacb-2eec91c18aa8-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vcp4n\" (UID: \"19d07388-56ad-4bb6-bacb-2eec91c18aa8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383620 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-console-config\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383643 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-trusted-ca-bundle\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383702 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383728 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-etcd-serving-ca\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383752 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f323c12d-88be-4cc8-908f-adad081907d2-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.383776 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/27c73b63-ed31-4aae-bc66-5b4707f469f5-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.384666 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-trusted-ca-bundle\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.384954 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f79ce18-3787-4106-882d-73271ab0018d-metrics-tls\") pod \"dns-operator-744455d44c-st5qs\" (UID: \"8f79ce18-3787-4106-882d-73271ab0018d\") " pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.384987 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385011 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-service-ca\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385043 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-audit-policies\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385087 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d671e62e-b720-47ba-b4eb-e93671f7e327-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385120 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-image-import-ca\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385138 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-serving-cert\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385170 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f323c12d-88be-4cc8-908f-adad081907d2-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385191 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-client-ca\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385236 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-config\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385260 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkkp5\" (UniqueName: 
\"kubernetes.io/projected/4c230027-5b5a-4c39-8594-c09c36112ab8-kube-api-access-dkkp5\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.385281 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6623fd0e-357f-459b-8e04-6da531bf7b7b-serving-cert\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387217 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-client\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387250 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4c230027-5b5a-4c39-8594-c09c36112ab8-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387275 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-trusted-ca\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387294 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-encryption-config\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387310 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27c73b63-ed31-4aae-bc66-5b4707f469f5-config\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387359 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9ffg\" (UniqueName: \"kubernetes.io/projected/0fc9761f-215c-4784-9b76-073fae1106a5-kube-api-access-l9ffg\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387395 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c8072fb-34b5-4c43-895b-9ccb724b9199-serving-cert\") pod 
\"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387442 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387469 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwrgl\" (UniqueName: \"kubernetes.io/projected/8f79ce18-3787-4106-882d-73271ab0018d-kube-api-access-pwrgl\") pod \"dns-operator-744455d44c-st5qs\" (UID: \"8f79ce18-3787-4106-882d-73271ab0018d\") " pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387493 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-service-ca-bundle\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387534 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-client-ca\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387552 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-audit-policies\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387570 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387589 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387608 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387624 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-serving-cert\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387672 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s77cb\" (UniqueName: \"kubernetes.io/projected/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-kube-api-access-s77cb\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387690 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387704 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-etcd-client\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387720 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-etcd-client\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387739 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387761 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4694c58d-e630-4eff-a677-d13aca00fcab-audit-dir\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387777 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-oauth-config\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.387979 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-oauth-serving-cert\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388035 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98q4k\" (UniqueName: \"kubernetes.io/projected/d671e62e-b720-47ba-b4eb-e93671f7e327-kube-api-access-98q4k\") pod \"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388051 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a1ebb7a8-ac20-491f-b670-afb617d1e060-audit-dir\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388070 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388105 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2zdz\" (UniqueName: \"kubernetes.io/projected/82b92810-2e1d-4ddb-962f-09351da4b297-kube-api-access-c2zdz\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388123 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0fc9761f-215c-4784-9b76-073fae1106a5-auth-proxy-config\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388137 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-audit\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388157 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpcwn\" (UniqueName: \"kubernetes.io/projected/1963bdcc-e63d-4227-8f00-8c3900996a30-kube-api-access-vpcwn\") pod \"cluster-samples-operator-665b6dd947-gwlgr\" (UID: \"1963bdcc-e63d-4227-8f00-8c3900996a30\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388173 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/e26c3b04-aca8-46b5-a653-8257d4458d5f-images\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388187 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-ca\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388205 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c8qtm\" (UniqueName: \"kubernetes.io/projected/aae7d681-bfbe-4280-92b9-f117157b6be8-kube-api-access-c8qtm\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388223 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/345c7db2-4067-402c-bddf-3a497a9540c2-serving-cert\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388239 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvg5v\" (UniqueName: \"kubernetes.io/projected/a1ebb7a8-ac20-491f-b670-afb617d1e060-kube-api-access-vvg5v\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388397 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388874 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388893 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5t8t9"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388906 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7vkfd"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388917 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388931 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-oauth-serving-cert\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.388951 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.389319 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/82b92810-2e1d-4ddb-962f-09351da4b297-service-ca-bundle\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.389420 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/0fc9761f-215c-4784-9b76-073fae1106a5-machine-approver-tls\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.391336 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-console-config\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.391493 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.391720 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.392864 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-oauth-config\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.393408 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.393518 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.393818 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-st5qs"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.393879 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g69d4"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.394017 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0fc9761f-215c-4784-9b76-073fae1106a5-auth-proxy-config\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.394153 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.394613 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-client-ca\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.395776 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.396052 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.397559 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.400128 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/82b92810-2e1d-4ddb-962f-09351da4b297-serving-cert\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.400720 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c8072fb-34b5-4c43-895b-9ccb724b9199-serving-cert\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.403976 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.406926 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hv779"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.408062 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.409649 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.409909 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-config\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.424150 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.424884 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.424271 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.425438 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.425788 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.426263 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.427086 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.428191 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.428571 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.430074 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.430392 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-serving-cert\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.431435 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5mwrz"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.432581 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.434581 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8gsqv"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.437323 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.437901 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-69ksw"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.438827 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.440024 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.441448 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2xhf5"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.442375 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.443252 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.443876 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.444927 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkvkz"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.446019 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fdlk7"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.447217 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-h2jxx"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.448393 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6c8pc"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.450534 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.451648 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.452686 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.453667 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.454889 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-tqft4"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.456123 4690 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-v5r8p"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.456316 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.456522 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.457609 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.458593 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.459560 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.460556 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.461742 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.462363 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.463328 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.464558 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.466008 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g69d4"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.466756 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.467856 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hv779"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.468900 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.469900 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-tqft4"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.471410 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-69ksw"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.472585 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5mwrz"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.473442 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.474603 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-l547r"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.475145 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-l547r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.475741 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-l547r"] Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.482196 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.488971 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4694c58d-e630-4eff-a677-d13aca00fcab-audit-dir\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489012 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489046 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98q4k\" (UniqueName: \"kubernetes.io/projected/d671e62e-b720-47ba-b4eb-e93671f7e327-kube-api-access-98q4k\") pod \"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489069 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a1ebb7a8-ac20-491f-b670-afb617d1e060-audit-dir\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489045 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4694c58d-e630-4eff-a677-d13aca00fcab-audit-dir\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489129 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-audit\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489159 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-ca\") pod \"etcd-operator-b45778765-2xhf5\" (UID: 
\"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489163 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a1ebb7a8-ac20-491f-b670-afb617d1e060-audit-dir\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489183 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpcwn\" (UniqueName: \"kubernetes.io/projected/1963bdcc-e63d-4227-8f00-8c3900996a30-kube-api-access-vpcwn\") pod \"cluster-samples-operator-665b6dd947-gwlgr\" (UID: \"1963bdcc-e63d-4227-8f00-8c3900996a30\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489202 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e26c3b04-aca8-46b5-a653-8257d4458d5f-images\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489222 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvg5v\" (UniqueName: \"kubernetes.io/projected/a1ebb7a8-ac20-491f-b670-afb617d1e060-kube-api-access-vvg5v\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489239 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c8qtm\" (UniqueName: \"kubernetes.io/projected/aae7d681-bfbe-4280-92b9-f117157b6be8-kube-api-access-c8qtm\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489257 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/345c7db2-4067-402c-bddf-3a497a9540c2-serving-cert\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489277 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489299 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stm9g\" (UniqueName: \"kubernetes.io/projected/345c7db2-4067-402c-bddf-3a497a9540c2-kube-api-access-stm9g\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc 
kubenswrapper[4690]: I0320 13:25:57.489317 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjpr6\" (UniqueName: \"kubernetes.io/projected/4874f99f-2938-475f-872a-c7a794ae4818-kube-api-access-qjpr6\") pod \"downloads-7954f5f757-7vkfd\" (UID: \"4874f99f-2938-475f-872a-c7a794ae4818\") " pod="openshift-console/downloads-7954f5f757-7vkfd" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489337 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdlxz\" (UniqueName: \"kubernetes.io/projected/19d07388-56ad-4bb6-bacb-2eec91c18aa8-kube-api-access-rdlxz\") pod \"control-plane-machine-set-operator-78cbb6b69f-vcp4n\" (UID: \"19d07388-56ad-4bb6-bacb-2eec91c18aa8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489357 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489376 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489399 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489416 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aae7d681-bfbe-4280-92b9-f117157b6be8-audit-dir\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489432 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-encryption-config\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489447 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/aae7d681-bfbe-4280-92b9-f117157b6be8-node-pullsecrets\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489466 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489482 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2sz4\" (UniqueName: \"kubernetes.io/projected/4694c58d-e630-4eff-a677-d13aca00fcab-kube-api-access-q2sz4\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489497 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ccgf\" (UniqueName: \"kubernetes.io/projected/e26c3b04-aca8-46b5-a653-8257d4458d5f-kube-api-access-6ccgf\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489517 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489533 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489549 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1963bdcc-e63d-4227-8f00-8c3900996a30-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gwlgr\" (UID: \"1963bdcc-e63d-4227-8f00-8c3900996a30\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489563 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e26c3b04-aca8-46b5-a653-8257d4458d5f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489579 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/27c73b63-ed31-4aae-bc66-5b4707f469f5-images\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489596 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5n8vt\" (UniqueName: 
\"kubernetes.io/projected/27c73b63-ed31-4aae-bc66-5b4707f469f5-kube-api-access-5n8vt\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489611 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-config\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489636 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpmrl\" (UniqueName: \"kubernetes.io/projected/f323c12d-88be-4cc8-908f-adad081907d2-kube-api-access-hpmrl\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489656 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e26c3b04-aca8-46b5-a653-8257d4458d5f-proxy-tls\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489665 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-audit\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489673 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-config\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489693 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbnlw\" (UniqueName: \"kubernetes.io/projected/4703b44a-9eae-47ce-83bf-e2b66d4b3d91-kube-api-access-xbnlw\") pod \"migrator-59844c95c7-hbdxb\" (UID: \"4703b44a-9eae-47ce-83bf-e2b66d4b3d91\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489710 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-llbhr\" (UniqueName: \"kubernetes.io/projected/6623fd0e-357f-459b-8e04-6da531bf7b7b-kube-api-access-llbhr\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489729 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: 
\"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489746 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c230027-5b5a-4c39-8594-c09c36112ab8-serving-cert\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489763 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-config\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489779 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-config\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489794 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489809 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d671e62e-b720-47ba-b4eb-e93671f7e327-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489843 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489875 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-serving-cert\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489891 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/19d07388-56ad-4bb6-bacb-2eec91c18aa8-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vcp4n\" (UID: \"19d07388-56ad-4bb6-bacb-2eec91c18aa8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489934 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-etcd-serving-ca\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489950 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f323c12d-88be-4cc8-908f-adad081907d2-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489967 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/27c73b63-ed31-4aae-bc66-5b4707f469f5-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.489986 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490001 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490016 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-service-ca\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490030 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f79ce18-3787-4106-882d-73271ab0018d-metrics-tls\") pod \"dns-operator-744455d44c-st5qs\" (UID: \"8f79ce18-3787-4106-882d-73271ab0018d\") " pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490047 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-audit-policies\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490064 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d671e62e-b720-47ba-b4eb-e93671f7e327-config\") pod 
\"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490079 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-image-import-ca\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490099 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-serving-cert\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490072 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/aae7d681-bfbe-4280-92b9-f117157b6be8-audit-dir\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490779 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.490121 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f323c12d-88be-4cc8-908f-adad081907d2-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.491026 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-client-ca\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.491062 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkkp5\" (UniqueName: \"kubernetes.io/projected/4c230027-5b5a-4c39-8594-c09c36112ab8-kube-api-access-dkkp5\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.491083 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-config\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.491091 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6623fd0e-357f-459b-8e04-6da531bf7b7b-serving-cert\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.491091 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f323c12d-88be-4cc8-908f-adad081907d2-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.491155 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e26c3b04-aca8-46b5-a653-8257d4458d5f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.491584 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.491993 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-etcd-serving-ca\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.492088 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/27c73b63-ed31-4aae-bc66-5b4707f469f5-images\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.492328 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-config\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.492608 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-client-ca\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.493148 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-encryption-config\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: 
\"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.493607 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-trusted-ca-bundle\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.493643 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494170 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494321 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/aae7d681-bfbe-4280-92b9-f117157b6be8-node-pullsecrets\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494428 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4c230027-5b5a-4c39-8594-c09c36112ab8-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494469 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-client\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494514 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-trusted-ca\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494539 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-encryption-config\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494549 4690 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f323c12d-88be-4cc8-908f-adad081907d2-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494562 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27c73b63-ed31-4aae-bc66-5b4707f469f5-config\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494596 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwrgl\" (UniqueName: \"kubernetes.io/projected/8f79ce18-3787-4106-882d-73271ab0018d-kube-api-access-pwrgl\") pod \"dns-operator-744455d44c-st5qs\" (UID: \"8f79ce18-3787-4106-882d-73271ab0018d\") " pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494709 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-audit-policies\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494744 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494780 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.494949 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495129 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d671e62e-b720-47ba-b4eb-e93671f7e327-config\") pod \"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495150 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/8f79ce18-3787-4106-882d-73271ab0018d-metrics-tls\") pod 
\"dns-operator-744455d44c-st5qs\" (UID: \"8f79ce18-3787-4106-882d-73271ab0018d\") " pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495180 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-audit-policies\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495598 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27c73b63-ed31-4aae-bc66-5b4707f469f5-config\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495639 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-serving-cert\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495673 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s77cb\" (UniqueName: \"kubernetes.io/projected/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-kube-api-access-s77cb\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495701 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495726 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-etcd-client\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495746 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-config\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495751 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-etcd-client\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495768 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" 
(UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495877 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/27c73b63-ed31-4aae-bc66-5b4707f469f5-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495986 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-config\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.495988 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.496975 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/4c230027-5b5a-4c39-8594-c09c36112ab8-available-featuregates\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.497181 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.497417 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-encryption-config\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.497418 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-trusted-ca\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.497499 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/aae7d681-bfbe-4280-92b9-f117157b6be8-image-import-ca\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc 
kubenswrapper[4690]: I0320 13:25:57.497510 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a1ebb7a8-ac20-491f-b670-afb617d1e060-audit-policies\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.497784 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.497820 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.498301 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-etcd-client\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.498510 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.498585 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d671e62e-b720-47ba-b4eb-e93671f7e327-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.499130 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/345c7db2-4067-402c-bddf-3a497a9540c2-serving-cert\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.499184 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/aae7d681-bfbe-4280-92b9-f117157b6be8-serving-cert\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.499564 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-serving-cert\") pod 
\"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.499808 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.500271 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a1ebb7a8-ac20-491f-b670-afb617d1e060-etcd-client\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.500375 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-serving-cert\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.500398 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c230027-5b5a-4c39-8594-c09c36112ab8-serving-cert\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.501142 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.501197 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1963bdcc-e63d-4227-8f00-8c3900996a30-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gwlgr\" (UID: \"1963bdcc-e63d-4227-8f00-8c3900996a30\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.502609 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.502735 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.507001 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-service-ca\") pod \"etcd-operator-b45778765-2xhf5\" (UID: 
\"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.522913 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.543308 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.562951 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.587211 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.602489 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.622819 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.627953 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6623fd0e-357f-459b-8e04-6da531bf7b7b-serving-cert\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.643255 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.662921 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.683236 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.702190 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.708640 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-client\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.722841 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.731146 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/6623fd0e-357f-459b-8e04-6da531bf7b7b-etcd-ca\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.763019 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 
13:25:57.768612 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/19d07388-56ad-4bb6-bacb-2eec91c18aa8-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-vcp4n\" (UID: \"19d07388-56ad-4bb6-bacb-2eec91c18aa8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.783959 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.803390 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.810559 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.823202 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.843080 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.862508 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.871124 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.883032 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.903693 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.923245 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.948927 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.962684 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Mar 20 13:25:57 crc kubenswrapper[4690]: I0320 13:25:57.983684 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 
13:25:58.002998 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.022938 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.042697 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.062810 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.082833 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.103749 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.123133 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.148509 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.163143 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.183301 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.191546 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e26c3b04-aca8-46b5-a653-8257d4458d5f-images\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.203047 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.214538 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e26c3b04-aca8-46b5-a653-8257d4458d5f-proxy-tls\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.222889 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.280969 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhwgx\" (UniqueName: \"kubernetes.io/projected/5c8072fb-34b5-4c43-895b-9ccb724b9199-kube-api-access-hhwgx\") pod \"controller-manager-879f6c89f-fdlk7\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.300540 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mp887\" (UniqueName: \"kubernetes.io/projected/74952b15-473b-462f-a05f-6c00433ed4d5-kube-api-access-mp887\") pod \"console-f9d7485db-h2jxx\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.316326 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9ffg\" (UniqueName: \"kubernetes.io/projected/0fc9761f-215c-4784-9b76-073fae1106a5-kube-api-access-l9ffg\") pod \"machine-approver-56656f9798-f6j8r\" (UID: \"0fc9761f-215c-4784-9b76-073fae1106a5\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.337196 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2zdz\" (UniqueName: \"kubernetes.io/projected/82b92810-2e1d-4ddb-962f-09351da4b297-kube-api-access-c2zdz\") pod \"authentication-operator-69f744f599-kw6wc\" (UID: \"82b92810-2e1d-4ddb-962f-09351da4b297\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.342837 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.353194 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.358305 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.363396 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.383699 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.401638 4690 request.go:700] Waited for 1.009690262s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/secrets?fieldSelector=metadata.name%3Dpprof-cert&limit=500&resourceVersion=0 Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.404084 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.422365 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.438078 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.442805 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.464105 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.503156 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.504919 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.505867 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.523123 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.543006 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.563106 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.582814 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.603946 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.622978 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.643467 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.663993 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.667038 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-kw6wc"] Mar 20 13:25:58 crc kubenswrapper[4690]: W0320 13:25:58.677867 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82b92810_2e1d_4ddb_962f_09351da4b297.slice/crio-fdc7de8ca6c4bf0b98a4f9265d9863e84c286f80886a0b007ea65115ebfd6884 WatchSource:0}: Error finding container fdc7de8ca6c4bf0b98a4f9265d9863e84c286f80886a0b007ea65115ebfd6884: Status 404 returned error can't find the container with id fdc7de8ca6c4bf0b98a4f9265d9863e84c286f80886a0b007ea65115ebfd6884 Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.682131 4690 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"kube-root-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.704329 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.722765 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.743513 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.762211 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.782961 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.802529 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.822481 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fdlk7"] Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.822772 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Mar 20 13:25:58 crc kubenswrapper[4690]: W0320 13:25:58.827797 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c8072fb_34b5_4c43_895b_9ccb724b9199.slice/crio-924f96e428f8c8e0d81f9a458c48d52a832d10aee0bf7a70f5ff11f75527c6e1 WatchSource:0}: Error finding container 924f96e428f8c8e0d81f9a458c48d52a832d10aee0bf7a70f5ff11f75527c6e1: Status 404 returned error can't find the container with id 924f96e428f8c8e0d81f9a458c48d52a832d10aee0bf7a70f5ff11f75527c6e1 Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.836681 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-h2jxx"] Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.842659 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.863595 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.882437 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.903721 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.922909 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.942660 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Mar 20 
13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.963243 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Mar 20 13:25:58 crc kubenswrapper[4690]: I0320 13:25:58.983569 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.003130 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.022836 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.042547 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.073169 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.082573 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.103453 4690 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.123438 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.143395 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.162801 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.183347 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.202417 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.223478 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.243126 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.262789 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.282987 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.303667 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.322390 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 
13:25:59.343946 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.367896 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" event={"ID":"82b92810-2e1d-4ddb-962f-09351da4b297","Type":"ContainerStarted","Data":"bb4302d44bd73d7a1e0b0e8b2995b913e0b359e41a0888a9ae53a85f6af68809"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.367952 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" event={"ID":"82b92810-2e1d-4ddb-962f-09351da4b297","Type":"ContainerStarted","Data":"fdc7de8ca6c4bf0b98a4f9265d9863e84c286f80886a0b007ea65115ebfd6884"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.370495 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h2jxx" event={"ID":"74952b15-473b-462f-a05f-6c00433ed4d5","Type":"ContainerStarted","Data":"f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.370531 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h2jxx" event={"ID":"74952b15-473b-462f-a05f-6c00433ed4d5","Type":"ContainerStarted","Data":"6c815e07f71eaef0334600e27b15dcb578b4717bf3326d08155cd22c1ad3b139"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.373565 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" event={"ID":"0fc9761f-215c-4784-9b76-073fae1106a5","Type":"ContainerStarted","Data":"5a0b481a469fb3768c23f4ae8276039c0f522e555a57f0a5b6c04806770467fc"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.373607 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" event={"ID":"0fc9761f-215c-4784-9b76-073fae1106a5","Type":"ContainerStarted","Data":"9ab1a7405a8432567aaafed01514dd6c97e817355e3187fda5b0950761e04919"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.373625 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" event={"ID":"0fc9761f-215c-4784-9b76-073fae1106a5","Type":"ContainerStarted","Data":"3ab3cc37d8c7e30092da04bc6df283f32eb52488b94405091e815faeadb56d92"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.375353 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" event={"ID":"5c8072fb-34b5-4c43-895b-9ccb724b9199","Type":"ContainerStarted","Data":"b58f46ebf351c60d412a292be702091397054d895ee8ff16acb322fb00d89d2d"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.375382 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" event={"ID":"5c8072fb-34b5-4c43-895b-9ccb724b9199","Type":"ContainerStarted","Data":"924f96e428f8c8e0d81f9a458c48d52a832d10aee0bf7a70f5ff11f75527c6e1"} Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.375564 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.377023 4690 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-fdlk7 container/controller-manager 
namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.377077 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" podUID="5c8072fb-34b5-4c43-895b-9ccb724b9199" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.384282 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98q4k\" (UniqueName: \"kubernetes.io/projected/d671e62e-b720-47ba-b4eb-e93671f7e327-kube-api-access-98q4k\") pod \"openshift-apiserver-operator-796bbdcf4f-mcccn\" (UID: \"d671e62e-b720-47ba-b4eb-e93671f7e327\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.398112 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpcwn\" (UniqueName: \"kubernetes.io/projected/1963bdcc-e63d-4227-8f00-8c3900996a30-kube-api-access-vpcwn\") pod \"cluster-samples-operator-665b6dd947-gwlgr\" (UID: \"1963bdcc-e63d-4227-8f00-8c3900996a30\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.402998 4690 request.go:700] Waited for 1.912984675s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager-operator/serviceaccounts/openshift-controller-manager-operator/token Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.424004 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpmrl\" (UniqueName: \"kubernetes.io/projected/f323c12d-88be-4cc8-908f-adad081907d2-kube-api-access-hpmrl\") pod \"openshift-controller-manager-operator-756b6f6bc6-7pppm\" (UID: \"f323c12d-88be-4cc8-908f-adad081907d2\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.437840 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvg5v\" (UniqueName: \"kubernetes.io/projected/a1ebb7a8-ac20-491f-b670-afb617d1e060-kube-api-access-vvg5v\") pod \"apiserver-7bbb656c7d-m8bdh\" (UID: \"a1ebb7a8-ac20-491f-b670-afb617d1e060\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.457266 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2sz4\" (UniqueName: \"kubernetes.io/projected/4694c58d-e630-4eff-a677-d13aca00fcab-kube-api-access-q2sz4\") pod \"oauth-openshift-558db77b4-rj8zv\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.471610 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.478658 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stm9g\" (UniqueName: \"kubernetes.io/projected/345c7db2-4067-402c-bddf-3a497a9540c2-kube-api-access-stm9g\") pod \"route-controller-manager-6576b87f9c-p4fhp\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.483138 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.498482 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjpr6\" (UniqueName: \"kubernetes.io/projected/4874f99f-2938-475f-872a-c7a794ae4818-kube-api-access-qjpr6\") pod \"downloads-7954f5f757-7vkfd\" (UID: \"4874f99f-2938-475f-872a-c7a794ae4818\") " pod="openshift-console/downloads-7954f5f757-7vkfd" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.524544 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-7vkfd" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.538920 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.541948 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdlxz\" (UniqueName: \"kubernetes.io/projected/19d07388-56ad-4bb6-bacb-2eec91c18aa8-kube-api-access-rdlxz\") pod \"control-plane-machine-set-operator-78cbb6b69f-vcp4n\" (UID: \"19d07388-56ad-4bb6-bacb-2eec91c18aa8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.548138 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5n8vt\" (UniqueName: \"kubernetes.io/projected/27c73b63-ed31-4aae-bc66-5b4707f469f5-kube-api-access-5n8vt\") pod \"machine-api-operator-5694c8668f-8gsqv\" (UID: \"27c73b63-ed31-4aae-bc66-5b4707f469f5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.567535 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xbnlw\" (UniqueName: \"kubernetes.io/projected/4703b44a-9eae-47ce-83bf-e2b66d4b3d91-kube-api-access-xbnlw\") pod \"migrator-59844c95c7-hbdxb\" (UID: \"4703b44a-9eae-47ce-83bf-e2b66d4b3d91\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.579226 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.608983 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkkp5\" (UniqueName: \"kubernetes.io/projected/4c230027-5b5a-4c39-8594-c09c36112ab8-kube-api-access-dkkp5\") pod \"openshift-config-operator-7777fb866f-mn9nn\" (UID: \"4c230027-5b5a-4c39-8594-c09c36112ab8\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.624875 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/95727575-c5bb-4eb1-9f36-29d2acc3f7ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-pd6xp\" (UID: \"95727575-c5bb-4eb1-9f36-29d2acc3f7ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.628379 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-llbhr\" (UniqueName: \"kubernetes.io/projected/6623fd0e-357f-459b-8e04-6da531bf7b7b-kube-api-access-llbhr\") pod \"etcd-operator-b45778765-2xhf5\" (UID: \"6623fd0e-357f-459b-8e04-6da531bf7b7b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.630608 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.643784 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.644199 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.649454 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwrgl\" (UniqueName: \"kubernetes.io/projected/8f79ce18-3787-4106-882d-73271ab0018d-kube-api-access-pwrgl\") pod \"dns-operator-744455d44c-st5qs\" (UID: \"8f79ce18-3787-4106-882d-73271ab0018d\") " pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.661109 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.668901 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ccgf\" (UniqueName: \"kubernetes.io/projected/e26c3b04-aca8-46b5-a653-8257d4458d5f-kube-api-access-6ccgf\") pod \"machine-config-operator-74547568cd-xfzzm\" (UID: \"e26c3b04-aca8-46b5-a653-8257d4458d5f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.680123 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c8qtm\" (UniqueName: \"kubernetes.io/projected/aae7d681-bfbe-4280-92b9-f117157b6be8-kube-api-access-c8qtm\") pod \"apiserver-76f77b778f-6c8pc\" (UID: \"aae7d681-bfbe-4280-92b9-f117157b6be8\") " pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.681058 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.701629 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s77cb\" (UniqueName: \"kubernetes.io/projected/973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1-kube-api-access-s77cb\") pod \"console-operator-58897d9998-5t8t9\" (UID: \"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1\") " pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.734587 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn"] Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.748077 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.764083 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.824562 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr"] Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.829154 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.831782 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-tls\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.831822 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-signing-cabundle\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.832117 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bsrgj\" (UniqueName: \"kubernetes.io/projected/617c74e7-0a16-4376-822f-390d3c44c7c5-kube-api-access-bsrgj\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.832138 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.832153 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a980fba9-5b4e-4042-9e79-b816bac0bc19-cert\") pod \"ingress-canary-l547r\" (UID: \"a980fba9-5b4e-4042-9e79-b816bac0bc19\") " pod="openshift-ingress-canary/ingress-canary-l547r" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.832168 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-srv-cert\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.832221 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-metrics-certs\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833115 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4cdf5de-0994-48c7-967a-6271fdd5e023-config\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833152 
4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/068d0d7d-4bf9-4019-87da-451c2554d6d3-tmpfs\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833167 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-metrics-tls\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833319 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833356 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ck594\" (UniqueName: \"kubernetes.io/projected/248801a4-0271-4a76-93eb-efea07c28a24-kube-api-access-ck594\") pod \"multus-admission-controller-857f4d67dd-g69d4\" (UID: \"248801a4-0271-4a76-93eb-efea07c28a24\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833386 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/343731ce-b08c-47db-a6d1-16fa2278c711-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833410 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbmzx\" (UniqueName: \"kubernetes.io/projected/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-kube-api-access-kbmzx\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833468 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftcvk\" (UniqueName: \"kubernetes.io/projected/f4cdf5de-0994-48c7-967a-6271fdd5e023-kube-api-access-ftcvk\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833114 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm"] Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833558 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-registration-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: 
\"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833587 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-config-volume\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833625 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-stats-auth\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.833705 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ec3e325e-1789-4918-9ebf-dabed8ba1408-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.834737 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343731ce-b08c-47db-a6d1-16fa2278c711-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.835497 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzn4d\" (UniqueName: \"kubernetes.io/projected/8f36c2e7-4cd4-4491-9cd9-824a6917db82-kube-api-access-mzn4d\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.835551 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5ae042df-e0db-4bd4-b519-22c2ab7ac732-metrics-tls\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.835590 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbdvx\" (UniqueName: \"kubernetes.io/projected/5ae042df-e0db-4bd4-b519-22c2ab7ac732-kube-api-access-rbdvx\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.837354 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-default-certificate\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " 
pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.837528 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.838327 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2kcn\" (UniqueName: \"kubernetes.io/projected/068d0d7d-4bf9-4019-87da-451c2554d6d3-kube-api-access-q2kcn\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.838359 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-socket-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.838382 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5m9q\" (UniqueName: \"kubernetes.io/projected/501636dd-fcee-43af-84c5-56774cc6c48e-kube-api-access-k5m9q\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.838456 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-csi-data-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.838493 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ec3e325e-1789-4918-9ebf-dabed8ba1408-proxy-tls\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.838558 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53349f20-095b-4c88-b827-f3d6d09c15fc-secret-volume\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.838588 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.839296 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/704eed42-1e9b-4d8c-be9f-4d237658ae86-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.839344 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/068d0d7d-4bf9-4019-87da-451c2554d6d3-apiservice-cert\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.839391 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xppjl\" (UniqueName: \"kubernetes.io/projected/53349f20-095b-4c88-b827-f3d6d09c15fc-kube-api-access-xppjl\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.839407 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/068d0d7d-4bf9-4019-87da-451c2554d6d3-webhook-cert\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.839424 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-mountpoint-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.839440 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hflt6\" (UniqueName: \"kubernetes.io/projected/7f3e5030-65a9-4876-a146-7087d00a33ba-kube-api-access-hflt6\") pod \"package-server-manager-789f6589d5-rnpgp\" (UID: \"7f3e5030-65a9-4876-a146-7087d00a33ba\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.840342 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hvnx\" (UniqueName: \"kubernetes.io/projected/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-kube-api-access-8hvnx\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.840968 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0df460ff-0af9-41a3-ac1c-f06de540df23-node-bootstrap-token\") pod \"machine-config-server-v5r8p\" (UID: \"0df460ff-0af9-41a3-ac1c-f06de540df23\") " 
pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841089 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53349f20-095b-4c88-b827-f3d6d09c15fc-config-volume\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841134 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: W0320 13:25:59.841247 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd671e62e_b720_47ba_b4eb_e93671f7e327.slice/crio-b856bb06a6e612396308f538830aa2943a3a9e67f82086744e583ec668e0af5d WatchSource:0}: Error finding container b856bb06a6e612396308f538830aa2943a3a9e67f82086744e583ec668e0af5d: Status 404 returned error can't find the container with id b856bb06a6e612396308f538830aa2943a3a9e67f82086744e583ec668e0af5d Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841277 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/501636dd-fcee-43af-84c5-56774cc6c48e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841386 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23ed3e8b-fcc1-446e-bb52-863602c42c6d-service-ca-bundle\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841417 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/248801a4-0271-4a76-93eb-efea07c28a24-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g69d4\" (UID: \"248801a4-0271-4a76-93eb-efea07c28a24\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841447 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/501636dd-fcee-43af-84c5-56774cc6c48e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841465 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pvh2\" (UniqueName: 
\"kubernetes.io/projected/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-kube-api-access-9pvh2\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841637 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841671 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8m7d\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-kube-api-access-r8m7d\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841688 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5ae042df-e0db-4bd4-b519-22c2ab7ac732-bound-sa-token\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841705 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4cdf5de-0994-48c7-967a-6271fdd5e023-serving-cert\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841723 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fpxs\" (UniqueName: \"kubernetes.io/projected/a980fba9-5b4e-4042-9e79-b816bac0bc19-kube-api-access-7fpxs\") pod \"ingress-canary-l547r\" (UID: \"a980fba9-5b4e-4042-9e79-b816bac0bc19\") " pod="openshift-ingress-canary/ingress-canary-l547r" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841888 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-certificates\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841944 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-bound-sa-token\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841964 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.841981 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f3e5030-65a9-4876-a146-7087d00a33ba-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rnpgp\" (UID: \"7f3e5030-65a9-4876-a146-7087d00a33ba\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.842007 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343731ce-b08c-47db-a6d1-16fa2278c711-config\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.842063 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8f36c2e7-4cd4-4491-9cd9-824a6917db82-profile-collector-cert\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.842717 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-trusted-ca\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.842740 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn7n6\" (UniqueName: \"kubernetes.io/projected/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-kube-api-access-tn7n6\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: E0320 13:25:59.842773 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:00.342751967 +0000 UTC m=+206.632351910 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.842804 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8f36c2e7-4cd4-4491-9cd9-824a6917db82-srv-cert\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.842834 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5ae042df-e0db-4bd4-b519-22c2ab7ac732-trusted-ca\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.843208 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-signing-key\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.843446 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-plugins-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.843512 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nqwl2\" (UniqueName: \"kubernetes.io/projected/0df460ff-0af9-41a3-ac1c-f06de540df23-kube-api-access-nqwl2\") pod \"machine-config-server-v5r8p\" (UID: \"0df460ff-0af9-41a3-ac1c-f06de540df23\") " pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.843617 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0df460ff-0af9-41a3-ac1c-f06de540df23-certs\") pod \"machine-config-server-v5r8p\" (UID: \"0df460ff-0af9-41a3-ac1c-f06de540df23\") " pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.843801 4690 ???:1] "http: TLS handshake error from 192.168.126.11:53598: no serving certificate available for the kubelet" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.843925 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/704eed42-1e9b-4d8c-be9f-4d237658ae86-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.843986 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.844113 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5qxv\" (UniqueName: \"kubernetes.io/projected/23ed3e8b-fcc1-446e-bb52-863602c42c6d-kube-api-access-f5qxv\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.846283 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52mt5\" (UniqueName: \"kubernetes.io/projected/ec3e325e-1789-4918-9ebf-dabed8ba1408-kube-api-access-52mt5\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.851722 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.852398 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-phttf\" (UniqueName: \"kubernetes.io/projected/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-kube-api-access-phttf\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.852450 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.852533 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-config\") pod \"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.865348 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.891528 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.948288 4690 ???:1] "http: TLS handshake error from 192.168.126.11:53614: no serving certificate available for the kubelet" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.953620 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954174 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/704eed42-1e9b-4d8c-be9f-4d237658ae86-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954211 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954259 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5qxv\" (UniqueName: \"kubernetes.io/projected/23ed3e8b-fcc1-446e-bb52-863602c42c6d-kube-api-access-f5qxv\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954284 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52mt5\" (UniqueName: \"kubernetes.io/projected/ec3e325e-1789-4918-9ebf-dabed8ba1408-kube-api-access-52mt5\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954309 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-phttf\" (UniqueName: \"kubernetes.io/projected/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-kube-api-access-phttf\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954333 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-config\") pod \"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954350 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-kube-api-access\") pod 
\"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954369 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-tls\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954387 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-signing-cabundle\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954406 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bsrgj\" (UniqueName: \"kubernetes.io/projected/617c74e7-0a16-4376-822f-390d3c44c7c5-kube-api-access-bsrgj\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954427 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954444 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a980fba9-5b4e-4042-9e79-b816bac0bc19-cert\") pod \"ingress-canary-l547r\" (UID: \"a980fba9-5b4e-4042-9e79-b816bac0bc19\") " pod="openshift-ingress-canary/ingress-canary-l547r" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954460 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-srv-cert\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954477 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-metrics-certs\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954495 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4cdf5de-0994-48c7-967a-6271fdd5e023-config\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954520 4690 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/068d0d7d-4bf9-4019-87da-451c2554d6d3-tmpfs\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954541 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-metrics-tls\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954566 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954588 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ck594\" (UniqueName: \"kubernetes.io/projected/248801a4-0271-4a76-93eb-efea07c28a24-kube-api-access-ck594\") pod \"multus-admission-controller-857f4d67dd-g69d4\" (UID: \"248801a4-0271-4a76-93eb-efea07c28a24\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954618 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/343731ce-b08c-47db-a6d1-16fa2278c711-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954644 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbmzx\" (UniqueName: \"kubernetes.io/projected/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-kube-api-access-kbmzx\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954676 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftcvk\" (UniqueName: \"kubernetes.io/projected/f4cdf5de-0994-48c7-967a-6271fdd5e023-kube-api-access-ftcvk\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954711 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-registration-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954738 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-config-volume\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " 
pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954757 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-stats-auth\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954786 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ec3e325e-1789-4918-9ebf-dabed8ba1408-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954810 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343731ce-b08c-47db-a6d1-16fa2278c711-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954834 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzn4d\" (UniqueName: \"kubernetes.io/projected/8f36c2e7-4cd4-4491-9cd9-824a6917db82-kube-api-access-mzn4d\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954870 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5ae042df-e0db-4bd4-b519-22c2ab7ac732-metrics-tls\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954889 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbdvx\" (UniqueName: \"kubernetes.io/projected/5ae042df-e0db-4bd4-b519-22c2ab7ac732-kube-api-access-rbdvx\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954907 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-default-certificate\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954927 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954946 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-q2kcn\" (UniqueName: \"kubernetes.io/projected/068d0d7d-4bf9-4019-87da-451c2554d6d3-kube-api-access-q2kcn\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954962 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-socket-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.954984 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5m9q\" (UniqueName: \"kubernetes.io/projected/501636dd-fcee-43af-84c5-56774cc6c48e-kube-api-access-k5m9q\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955001 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-csi-data-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955022 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ec3e325e-1789-4918-9ebf-dabed8ba1408-proxy-tls\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955047 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53349f20-095b-4c88-b827-f3d6d09c15fc-secret-volume\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955064 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955084 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/704eed42-1e9b-4d8c-be9f-4d237658ae86-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955104 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/068d0d7d-4bf9-4019-87da-451c2554d6d3-apiservice-cert\") 
pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955121 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xppjl\" (UniqueName: \"kubernetes.io/projected/53349f20-095b-4c88-b827-f3d6d09c15fc-kube-api-access-xppjl\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955141 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/068d0d7d-4bf9-4019-87da-451c2554d6d3-webhook-cert\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955158 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-mountpoint-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955176 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hflt6\" (UniqueName: \"kubernetes.io/projected/7f3e5030-65a9-4876-a146-7087d00a33ba-kube-api-access-hflt6\") pod \"package-server-manager-789f6589d5-rnpgp\" (UID: \"7f3e5030-65a9-4876-a146-7087d00a33ba\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955201 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hvnx\" (UniqueName: \"kubernetes.io/projected/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-kube-api-access-8hvnx\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955223 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0df460ff-0af9-41a3-ac1c-f06de540df23-node-bootstrap-token\") pod \"machine-config-server-v5r8p\" (UID: \"0df460ff-0af9-41a3-ac1c-f06de540df23\") " pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955276 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53349f20-095b-4c88-b827-f3d6d09c15fc-config-volume\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955295 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 
13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955315 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/501636dd-fcee-43af-84c5-56774cc6c48e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955337 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23ed3e8b-fcc1-446e-bb52-863602c42c6d-service-ca-bundle\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955355 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/248801a4-0271-4a76-93eb-efea07c28a24-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g69d4\" (UID: \"248801a4-0271-4a76-93eb-efea07c28a24\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955373 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/501636dd-fcee-43af-84c5-56774cc6c48e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955391 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pvh2\" (UniqueName: \"kubernetes.io/projected/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-kube-api-access-9pvh2\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955420 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8m7d\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-kube-api-access-r8m7d\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955469 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4cdf5de-0994-48c7-967a-6271fdd5e023-serving-cert\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:59 crc kubenswrapper[4690]: E0320 13:25:59.955499 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:00.455474503 +0000 UTC m=+206.745074446 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955550 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fpxs\" (UniqueName: \"kubernetes.io/projected/a980fba9-5b4e-4042-9e79-b816bac0bc19-kube-api-access-7fpxs\") pod \"ingress-canary-l547r\" (UID: \"a980fba9-5b4e-4042-9e79-b816bac0bc19\") " pod="openshift-ingress-canary/ingress-canary-l547r" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955582 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5ae042df-e0db-4bd4-b519-22c2ab7ac732-bound-sa-token\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955607 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-certificates\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955629 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-bound-sa-token\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955645 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955665 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f3e5030-65a9-4876-a146-7087d00a33ba-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rnpgp\" (UID: \"7f3e5030-65a9-4876-a146-7087d00a33ba\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955682 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343731ce-b08c-47db-a6d1-16fa2278c711-config\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955718 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-trusted-ca\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955735 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn7n6\" (UniqueName: \"kubernetes.io/projected/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-kube-api-access-tn7n6\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955758 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8f36c2e7-4cd4-4491-9cd9-824a6917db82-srv-cert\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955778 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8f36c2e7-4cd4-4491-9cd9-824a6917db82-profile-collector-cert\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955806 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-signing-key\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955827 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5ae042df-e0db-4bd4-b519-22c2ab7ac732-trusted-ca\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955886 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-plugins-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955909 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nqwl2\" (UniqueName: \"kubernetes.io/projected/0df460ff-0af9-41a3-ac1c-f06de540df23-kube-api-access-nqwl2\") pod \"machine-config-server-v5r8p\" (UID: \"0df460ff-0af9-41a3-ac1c-f06de540df23\") " pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.955932 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0df460ff-0af9-41a3-ac1c-f06de540df23-certs\") pod \"machine-config-server-v5r8p\" (UID: 
\"0df460ff-0af9-41a3-ac1c-f06de540df23\") " pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.956166 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/068d0d7d-4bf9-4019-87da-451c2554d6d3-tmpfs\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.956830 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f4cdf5de-0994-48c7-967a-6271fdd5e023-config\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.957099 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.961551 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-registration-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.965161 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-srv-cert\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.966250 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-config-volume\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.966763 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-signing-cabundle\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.967990 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-plugins-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.968395 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-trusted-ca\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: 
\"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.969096 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5ae042df-e0db-4bd4-b519-22c2ab7ac732-trusted-ca\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.969220 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/343731ce-b08c-47db-a6d1-16fa2278c711-config\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.969371 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-csi-data-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.969831 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-config\") pod \"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.971024 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-socket-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.971670 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.971975 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.977292 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-tls\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.979544 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ec3e325e-1789-4918-9ebf-dabed8ba1408-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.984620 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.984828 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/501636dd-fcee-43af-84c5-56774cc6c48e-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.985007 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-metrics-tls\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " pod="openshift-dns/dns-default-tqft4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.985403 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/704eed42-1e9b-4d8c-be9f-4d237658ae86-installation-pull-secrets\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.985678 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/343731ce-b08c-47db-a6d1-16fa2278c711-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.986134 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f4cdf5de-0994-48c7-967a-6271fdd5e023-serving-cert\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.986646 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/0df460ff-0af9-41a3-ac1c-f06de540df23-certs\") pod \"machine-config-server-v5r8p\" (UID: \"0df460ff-0af9-41a3-ac1c-f06de540df23\") " pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.987146 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/8f36c2e7-4cd4-4491-9cd9-824a6917db82-srv-cert\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.987638 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7f3e5030-65a9-4876-a146-7087d00a33ba-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-rnpgp\" (UID: \"7f3e5030-65a9-4876-a146-7087d00a33ba\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.987652 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53349f20-095b-4c88-b827-f3d6d09c15fc-config-volume\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.987728 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-mountpoint-dir\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.989166 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/23ed3e8b-fcc1-446e-bb52-863602c42c6d-service-ca-bundle\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.991147 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5ae042df-e0db-4bd4-b519-22c2ab7ac732-metrics-tls\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.992149 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-certificates\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.993292 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/704eed42-1e9b-4d8c-be9f-4d237658ae86-ca-trust-extracted\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.995271 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.998205 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-profile-collector-cert\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.992372 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/a980fba9-5b4e-4042-9e79-b816bac0bc19-cert\") pod \"ingress-canary-l547r\" (UID: \"a980fba9-5b4e-4042-9e79-b816bac0bc19\") " pod="openshift-ingress-canary/ingress-canary-l547r" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.998888 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/501636dd-fcee-43af-84c5-56774cc6c48e-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.999508 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ec3e325e-1789-4918-9ebf-dabed8ba1408-proxy-tls\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:25:59 crc kubenswrapper[4690]: I0320 13:25:59.999608 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-default-certificate\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.000088 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/068d0d7d-4bf9-4019-87da-451c2554d6d3-webhook-cert\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.000682 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.001402 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"webhook-certs\" (UniqueName: \"kubernetes.io/secret/248801a4-0271-4a76-93eb-efea07c28a24-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-g69d4\" (UID: \"248801a4-0271-4a76-93eb-efea07c28a24\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.004076 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-metrics-certs\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.007525 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/23ed3e8b-fcc1-446e-bb52-863602c42c6d-stats-auth\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.007576 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53349f20-095b-4c88-b827-f3d6d09c15fc-secret-volume\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.008727 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-signing-key\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.010442 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/0df460ff-0af9-41a3-ac1c-f06de540df23-node-bootstrap-token\") pod \"machine-config-server-v5r8p\" (UID: \"0df460ff-0af9-41a3-ac1c-f06de540df23\") " pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.012413 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/068d0d7d-4bf9-4019-87da-451c2554d6d3-apiservice-cert\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.013824 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzn4d\" (UniqueName: \"kubernetes.io/projected/8f36c2e7-4cd4-4491-9cd9-824a6917db82-kube-api-access-mzn4d\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.017624 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/8f36c2e7-4cd4-4491-9cd9-824a6917db82-profile-collector-cert\") pod \"catalog-operator-68c6474976-vks5p\" (UID: \"8f36c2e7-4cd4-4491-9cd9-824a6917db82\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:26:00 crc 
kubenswrapper[4690]: I0320 13:26:00.022424 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-pd59k\" (UID: \"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.039424 4690 ???:1] "http: TLS handshake error from 192.168.126.11:53622: no serving certificate available for the kubelet" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.043373 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bsrgj\" (UniqueName: \"kubernetes.io/projected/617c74e7-0a16-4376-822f-390d3c44c7c5-kube-api-access-bsrgj\") pod \"marketplace-operator-79b997595-5mwrz\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.046896 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-7vkfd"] Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.056837 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.057159 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:00.557148148 +0000 UTC m=+206.846748091 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.057602 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.075272 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ck594\" (UniqueName: \"kubernetes.io/projected/248801a4-0271-4a76-93eb-efea07c28a24-kube-api-access-ck594\") pod \"multus-admission-controller-857f4d67dd-g69d4\" (UID: \"248801a4-0271-4a76-93eb-efea07c28a24\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.087485 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/343731ce-b08c-47db-a6d1-16fa2278c711-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-st6cj\" (UID: \"343731ce-b08c-47db-a6d1-16fa2278c711\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.105654 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbmzx\" (UniqueName: \"kubernetes.io/projected/3bc6fdb3-ebc0-4d1f-b004-8e54086dc349-kube-api-access-kbmzx\") pod \"service-ca-9c57cc56f-hv779\" (UID: \"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349\") " pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.124313 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftcvk\" (UniqueName: \"kubernetes.io/projected/f4cdf5de-0994-48c7-967a-6271fdd5e023-kube-api-access-ftcvk\") pod \"service-ca-operator-777779d784-dv9wc\" (UID: \"f4cdf5de-0994-48c7-967a-6271fdd5e023\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.150226 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566886-cp8l8"] Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.150838 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.151425 4690 ???:1] "http: TLS handshake error from 192.168.126.11:53638: no serving certificate available for the kubelet" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.159157 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566886-cp8l8"] Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.159264 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.159963 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:00.659942431 +0000 UTC m=+206.949542374 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.171874 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nqwl2\" (UniqueName: \"kubernetes.io/projected/0df460ff-0af9-41a3-ac1c-f06de540df23-kube-api-access-nqwl2\") pod \"machine-config-server-v5r8p\" (UID: \"0df460ff-0af9-41a3-ac1c-f06de540df23\") " pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.172301 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-phttf\" (UniqueName: \"kubernetes.io/projected/93bf028e-dcbf-4438-bc7a-2003c9f2e88a-kube-api-access-phttf\") pod \"olm-operator-6b444d44fb-hxnj4\" (UID: \"93bf028e-dcbf-4438-bc7a-2003c9f2e88a\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.181461 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xppjl\" (UniqueName: \"kubernetes.io/projected/53349f20-095b-4c88-b827-f3d6d09c15fc-kube-api-access-xppjl\") pod \"collect-profiles-29566875-g76qr\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.184324 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.213442 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbdvx\" (UniqueName: \"kubernetes.io/projected/5ae042df-e0db-4bd4-b519-22c2ab7ac732-kube-api-access-rbdvx\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.232060 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5ae042df-e0db-4bd4-b519-22c2ab7ac732-bound-sa-token\") pod \"ingress-operator-5b745b69d9-brvzm\" (UID: \"5ae042df-e0db-4bd4-b519-22c2ab7ac732\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.245610 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fpxs\" (UniqueName: \"kubernetes.io/projected/a980fba9-5b4e-4042-9e79-b816bac0bc19-kube-api-access-7fpxs\") pod \"ingress-canary-l547r\" (UID: \"a980fba9-5b4e-4042-9e79-b816bac0bc19\") " pod="openshift-ingress-canary/ingress-canary-l547r" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.248312 4690 ???:1] "http: TLS handshake error from 192.168.126.11:53642: no serving certificate available for the kubelet" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.256261 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.264718 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.264838 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7bth\" (UniqueName: \"kubernetes.io/projected/6db1d803-f871-41d2-b6a7-0b3456af1ddf-kube-api-access-t7bth\") pod \"auto-csr-approver-29566886-cp8l8\" (UID: \"6db1d803-f871-41d2-b6a7-0b3456af1ddf\") " pod="openshift-infra/auto-csr-approver-29566886-cp8l8" Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.265298 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:00.765283902 +0000 UTC m=+207.054883845 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.270531 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp"] Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.273473 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.281028 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.282960 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh"] Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.287302 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5m9q\" (UniqueName: \"kubernetes.io/projected/501636dd-fcee-43af-84c5-56774cc6c48e-kube-api-access-k5m9q\") pod \"kube-storage-version-migrator-operator-b67b599dd-gftvr\" (UID: \"501636dd-fcee-43af-84c5-56774cc6c48e\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.287694 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2kcn\" (UniqueName: \"kubernetes.io/projected/068d0d7d-4bf9-4019-87da-451c2554d6d3-kube-api-access-q2kcn\") pod \"packageserver-d55dfcdfc-vzblt\" (UID: \"068d0d7d-4bf9-4019-87da-451c2554d6d3\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.298473 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n"] Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.305736 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.307255 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.320591 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-hv779" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.320637 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.325462 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hvnx\" (UniqueName: \"kubernetes.io/projected/76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7-kube-api-access-8hvnx\") pod \"csi-hostpathplugin-69ksw\" (UID: \"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7\") " pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.331102 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.336086 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.339524 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.344640 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.344937 4690 ???:1] "http: TLS handshake error from 192.168.126.11:53650: no serving certificate available for the kubelet" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.365920 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.366461 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7bth\" (UniqueName: \"kubernetes.io/projected/6db1d803-f871-41d2-b6a7-0b3456af1ddf-kube-api-access-t7bth\") pod \"auto-csr-approver-29566886-cp8l8\" (UID: \"6db1d803-f871-41d2-b6a7-0b3456af1ddf\") " pod="openshift-infra/auto-csr-approver-29566886-cp8l8" Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.366586 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:00.866562893 +0000 UTC m=+207.156162846 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.370530 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5qxv\" (UniqueName: \"kubernetes.io/projected/23ed3e8b-fcc1-446e-bb52-863602c42c6d-kube-api-access-f5qxv\") pod \"router-default-5444994796-kl5sr\" (UID: \"23ed3e8b-fcc1-446e-bb52-863602c42c6d\") " pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.388921 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-69ksw" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.393089 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn7n6\" (UniqueName: \"kubernetes.io/projected/4ae7b233-0f48-40ef-8952-f01e9cc27d1d-kube-api-access-tn7n6\") pod \"cluster-image-registry-operator-dc59b4c8b-fcl8b\" (UID: \"4ae7b233-0f48-40ef-8952-f01e9cc27d1d\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.395300 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-v5r8p" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.403073 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-l547r" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.424157 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-bound-sa-token\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.459273 4690 ???:1] "http: TLS handshake error from 192.168.126.11:53658: no serving certificate available for the kubelet" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.467621 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.473057 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.474427 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:00.974399941 +0000 UTC m=+207.263999884 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.495182 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.496110 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb"] Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.506237 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.506715 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pvh2\" (UniqueName: \"kubernetes.io/projected/0b631f25-1a33-4d3f-9cd6-c932781e8c8b-kube-api-access-9pvh2\") pod \"dns-default-tqft4\" (UID: \"0b631f25-1a33-4d3f-9cd6-c932781e8c8b\") " pod="openshift-dns/dns-default-tqft4" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.509329 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hflt6\" (UniqueName: \"kubernetes.io/projected/7f3e5030-65a9-4876-a146-7087d00a33ba-kube-api-access-hflt6\") pod \"package-server-manager-789f6589d5-rnpgp\" (UID: \"7f3e5030-65a9-4876-a146-7087d00a33ba\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.509684 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8m7d\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-kube-api-access-r8m7d\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.510324 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" event={"ID":"d671e62e-b720-47ba-b4eb-e93671f7e327","Type":"ContainerStarted","Data":"d2ae06cf3ef55eb32dbbbccbdadc92ce665c38fc9ffcad3a1f19ebb0a5161065"} Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.510358 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" event={"ID":"d671e62e-b720-47ba-b4eb-e93671f7e327","Type":"ContainerStarted","Data":"b856bb06a6e612396308f538830aa2943a3a9e67f82086744e583ec668e0af5d"} Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.526411 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52mt5\" (UniqueName: \"kubernetes.io/projected/ec3e325e-1789-4918-9ebf-dabed8ba1408-kube-api-access-52mt5\") pod \"machine-config-controller-84d6567774-jrwdq\" (UID: \"ec3e325e-1789-4918-9ebf-dabed8ba1408\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.550063 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" event={"ID":"f323c12d-88be-4cc8-908f-adad081907d2","Type":"ContainerStarted","Data":"50717ffaf8681147e424553bc4ecc3a615cda277bb057730f19e793736d57cc6"} Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.550107 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" event={"ID":"f323c12d-88be-4cc8-908f-adad081907d2","Type":"ContainerStarted","Data":"846582b48fc3a971062de06cc7d8577ed3d3ee5ce122dc430c684fc538a24cf1"} Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.558065 4690 ???:1] "http: TLS handshake error from 192.168.126.11:42184: no serving certificate available for the kubelet" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.560908 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.567624 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" event={"ID":"1963bdcc-e63d-4227-8f00-8c3900996a30","Type":"ContainerStarted","Data":"4500a0b758114c7803666d776f3045ac874ed173ee606b987482b1e6919d8043"} Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.567694 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" event={"ID":"1963bdcc-e63d-4227-8f00-8c3900996a30","Type":"ContainerStarted","Data":"e1850f1e34df7e354b231b8ecdf432482f6e950f2b0775af5fc4e3c53dcfd955"} Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.568871 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" event={"ID":"a1ebb7a8-ac20-491f-b670-afb617d1e060","Type":"ContainerStarted","Data":"acdbe5557873bf635e6336915eace6a3734bccc5d3fc62f4fc720ec4b300bbeb"} Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.579804 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7bth\" (UniqueName: \"kubernetes.io/projected/6db1d803-f871-41d2-b6a7-0b3456af1ddf-kube-api-access-t7bth\") pod \"auto-csr-approver-29566886-cp8l8\" (UID: \"6db1d803-f871-41d2-b6a7-0b3456af1ddf\") " pod="openshift-infra/auto-csr-approver-29566886-cp8l8" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.586273 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.587589 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.087565652 +0000 UTC m=+207.377165655 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.592378 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7vkfd" event={"ID":"4874f99f-2938-475f-872a-c7a794ae4818","Type":"ContainerStarted","Data":"980a87debb313d3b6e53ec69fe3c0abfdd344508a43f21159c5932d5747545f1"} Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.595164 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.601696 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.651242 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.665689 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.690266 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.691690 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.191670551 +0000 UTC m=+207.481270574 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.707526 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-tqft4" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.792400 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.793194 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.292973093 +0000 UTC m=+207.582573036 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.793873 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-f6j8r" podStartSLOduration=159.793857613 podStartE2EDuration="2m39.793857613s" podCreationTimestamp="2026-03-20 13:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:00.756330036 +0000 UTC m=+207.045929979" watchObservedRunningTime="2026-03-20 13:26:00.793857613 +0000 UTC m=+207.083457556" Mar 20 13:26:00 crc kubenswrapper[4690]: I0320 13:26:00.904565 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:00 crc kubenswrapper[4690]: E0320 13:26:00.904976 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.404962333 +0000 UTC m=+207.694562276 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.005254 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.005451 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.505413335 +0000 UTC m=+207.795013278 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.005897 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.006231 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.506217933 +0000 UTC m=+207.795817876 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.111490 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.111682 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.611666538 +0000 UTC m=+207.901266471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.111744 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.112102 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.612093752 +0000 UTC m=+207.901693695 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.212369 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.212777 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.712749562 +0000 UTC m=+208.002349505 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.242021 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-mcccn" podStartSLOduration=160.242002513 podStartE2EDuration="2m40.242002513s" podCreationTimestamp="2026-03-20 13:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:01.240604295 +0000 UTC m=+207.530204228" watchObservedRunningTime="2026-03-20 13:26:01.242002513 +0000 UTC m=+207.531602456" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.251332 4690 ???:1] "http: TLS handshake error from 192.168.126.11:42192: no serving certificate available for the kubelet" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.313751 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.314228 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.814212149 +0000 UTC m=+208.103812092 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.414553 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.415357 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:01.915336564 +0000 UTC m=+208.204936507 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.483266 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-kw6wc" podStartSLOduration=160.483252642 podStartE2EDuration="2m40.483252642s" podCreationTimestamp="2026-03-20 13:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:01.48146966 +0000 UTC m=+207.771069603" watchObservedRunningTime="2026-03-20 13:26:01.483252642 +0000 UTC m=+207.772852575" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.516079 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.516604 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.016582974 +0000 UTC m=+208.306182987 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.598946 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-kl5sr" event={"ID":"23ed3e8b-fcc1-446e-bb52-863602c42c6d","Type":"ContainerStarted","Data":"e7454a2d96bf8002f4158ccbc05415ebdd46a32b5bebfaa155a1a39a18777ea3"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.598996 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-kl5sr" event={"ID":"23ed3e8b-fcc1-446e-bb52-863602c42c6d","Type":"ContainerStarted","Data":"2a67a4dbff03bcc9e150ee989237c43368a596e1a15492424030497cafbde611"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.602272 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-v5r8p" event={"ID":"0df460ff-0af9-41a3-ac1c-f06de540df23","Type":"ContainerStarted","Data":"a16d939f9667f45bc350d1303af9da506ccfd656b30beac3b291a201b6b20702"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.602316 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-v5r8p" event={"ID":"0df460ff-0af9-41a3-ac1c-f06de540df23","Type":"ContainerStarted","Data":"2dcef4a5120e9c982712cca0991137c2db65c28ea145c9d17277f2dff788e52e"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.611656 4690 generic.go:334] "Generic (PLEG): container finished" podID="a1ebb7a8-ac20-491f-b670-afb617d1e060" containerID="b08a68153e9664c2fc0674c33ae457833fd214cc131e815a9ff4709b3ea43530" exitCode=0 Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.612702 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" event={"ID":"a1ebb7a8-ac20-491f-b670-afb617d1e060","Type":"ContainerDied","Data":"b08a68153e9664c2fc0674c33ae457833fd214cc131e815a9ff4709b3ea43530"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.615337 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" event={"ID":"345c7db2-4067-402c-bddf-3a497a9540c2","Type":"ContainerStarted","Data":"0c931b3ea66caaa0c191519844ad432f09f6eef14007624ede266966732d2ba0"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.615389 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" event={"ID":"345c7db2-4067-402c-bddf-3a497a9540c2","Type":"ContainerStarted","Data":"0370f1b3f4825a008b102701497deff3293b7c44211b709b49ea935560f51922"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.615888 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.616749 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.617243 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.117224643 +0000 UTC m=+208.406824596 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.618194 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" event={"ID":"19d07388-56ad-4bb6-bacb-2eec91c18aa8","Type":"ContainerStarted","Data":"d2cf69d3089d738801f87ea89655d8613f9374da6819ad5eb6829ea8eb2ab068"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.618230 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" event={"ID":"19d07388-56ad-4bb6-bacb-2eec91c18aa8","Type":"ContainerStarted","Data":"e518b03e2eb272de59babff1b9da67ee8bce9be7a0fb3746345fd88da5d901ce"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.640097 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-7vkfd" event={"ID":"4874f99f-2938-475f-872a-c7a794ae4818","Type":"ContainerStarted","Data":"41efca243c20d31f10b57df2736d773f684367b6951fac88ba078f7c5d760bb8"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.641189 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-7vkfd" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.642827 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" event={"ID":"4703b44a-9eae-47ce-83bf-e2b66d4b3d91","Type":"ContainerStarted","Data":"18abf00c8622e41a09f16f6e4cd8ef3b0daf2add7af2f54826f83836130fc27b"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.642910 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" event={"ID":"4703b44a-9eae-47ce-83bf-e2b66d4b3d91","Type":"ContainerStarted","Data":"8df8f734048c60a6b3b6508c5cd1615cecd5ea9e33a2e8c7d63131a60bf34a50"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.653045 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" event={"ID":"1963bdcc-e63d-4227-8f00-8c3900996a30","Type":"ContainerStarted","Data":"7dddd0c9e106ff46e825ba91e4c726ec253b6c48f0b0839774e39c9d36175e2a"} Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.665781 4690 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-p4fhp container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get 
\"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.665925 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" podUID="345c7db2-4067-402c-bddf-3a497a9540c2" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.18:8443/healthz\": dial tcp 10.217.0.18:8443: connect: connection refused" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.666634 4690 patch_prober.go:28] interesting pod/downloads-7954f5f757-7vkfd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.666736 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7vkfd" podUID="4874f99f-2938-475f-872a-c7a794ae4818" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.727412 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.730831 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.230811789 +0000 UTC m=+208.520411752 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.748732 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" podStartSLOduration=159.748708158 podStartE2EDuration="2m39.748708158s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:01.7102844 +0000 UTC m=+207.999884343" watchObservedRunningTime="2026-03-20 13:26:01.748708158 +0000 UTC m=+208.038308111" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.829502 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.830504 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.330488395 +0000 UTC m=+208.620088338 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.845006 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-h2jxx" podStartSLOduration=159.844987416 podStartE2EDuration="2m39.844987416s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:01.842378716 +0000 UTC m=+208.131978659" watchObservedRunningTime="2026-03-20 13:26:01.844987416 +0000 UTC m=+208.134587349" Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.895419 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-5t8t9"] Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.944882 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:01 crc kubenswrapper[4690]: E0320 13:26:01.945296 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.445285263 +0000 UTC m=+208.734885206 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.970682 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-8gsqv"] Mar 20 13:26:01 crc kubenswrapper[4690]: I0320 13:26:01.994688 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-2xhf5"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.049832 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.050285 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-03-20 13:26:02.550269742 +0000 UTC m=+208.839869685 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.062548 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7pppm" podStartSLOduration=160.062527886 podStartE2EDuration="2m40.062527886s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.007903337 +0000 UTC m=+208.297503280" watchObservedRunningTime="2026-03-20 13:26:02.062527886 +0000 UTC m=+208.352127829" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.063399 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rj8zv"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.065542 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.073042 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-6c8pc"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.094125 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.142025 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gwlgr" podStartSLOduration=160.142010603 podStartE2EDuration="2m40.142010603s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.141523106 +0000 UTC m=+208.431123049" watchObservedRunningTime="2026-03-20 13:26:02.142010603 +0000 UTC m=+208.431610546" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.154035 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.154296 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.654284827 +0000 UTC m=+208.943884770 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.174753 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" podStartSLOduration=160.174733284 podStartE2EDuration="2m40.174733284s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.171462361 +0000 UTC m=+208.461062304" watchObservedRunningTime="2026-03-20 13:26:02.174733284 +0000 UTC m=+208.464333247" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.255236 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.255675 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.755660282 +0000 UTC m=+209.045260225 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.284400 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-vcp4n" podStartSLOduration=160.284382934 podStartE2EDuration="2m40.284382934s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.245147908 +0000 UTC m=+208.534747851" watchObservedRunningTime="2026-03-20 13:26:02.284382934 +0000 UTC m=+208.573982867" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.285176 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.293350 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-v5r8p" podStartSLOduration=5.293315513 podStartE2EDuration="5.293315513s" podCreationTimestamp="2026-03-20 13:25:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.282332273 +0000 UTC m=+208.571932216" watchObservedRunningTime="2026-03-20 13:26:02.293315513 +0000 UTC m=+208.582915456" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.325364 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-7vkfd" podStartSLOduration=160.32534512 podStartE2EDuration="2m40.32534512s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.324297124 +0000 UTC m=+208.613897057" watchObservedRunningTime="2026-03-20 13:26:02.32534512 +0000 UTC m=+208.614945053" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.338380 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp"] Mar 20 13:26:02 crc kubenswrapper[4690]: W0320 13:26:02.342955 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f36c2e7_4cd4_4491_9cd9_824a6917db82.slice/crio-b92a95ec16ee20c480231b10d3b29e22bd4d91311fb76c430371262e9e8a2950 WatchSource:0}: Error finding container b92a95ec16ee20c480231b10d3b29e22bd4d91311fb76c430371262e9e8a2950: Status 404 returned error can't find the container with id b92a95ec16ee20c480231b10d3b29e22bd4d91311fb76c430371262e9e8a2950 Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.349580 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.356209 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.356539 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.856528758 +0000 UTC m=+209.146128701 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.365132 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-kl5sr" podStartSLOduration=160.365115255 podStartE2EDuration="2m40.365115255s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.364575066 +0000 UTC m=+208.654175009" watchObservedRunningTime="2026-03-20 13:26:02.365115255 +0000 UTC m=+208.654715198" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.393831 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-st5qs"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.393890 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-hv779"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.393901 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.403828 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.403887 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.459402 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.459567 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.959543759 +0000 UTC m=+209.249143702 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.459703 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.460029 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:02.960021276 +0000 UTC m=+209.249621219 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.473532 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-g69d4"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.473569 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5mwrz"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.474876 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.479167 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.485122 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.489887 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-l547r"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.508396 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.508446 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.516343 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-tqft4"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.563655 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.566823 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.567657 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.067622515 +0000 UTC m=+209.357222468 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.568585 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.578448 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.078416768 +0000 UTC m=+209.368016701 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.580026 4690 ???:1] "http: TLS handshake error from 192.168.126.11:42194: no serving certificate available for the kubelet" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.580119 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:02 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:02 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:02 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.580165 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.604174 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.625495 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.636113 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-69ksw"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.677769 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566886-cp8l8"] Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.685044 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.685431 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.185416147 +0000 UTC m=+209.475016090 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.746019 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" event={"ID":"27c73b63-ed31-4aae-bc66-5b4707f469f5","Type":"ContainerStarted","Data":"8174b1ecd69e4edcf94fdadf10534be8e1c20e4d17b5da8fb61a847b35f6382a"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.746255 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" event={"ID":"27c73b63-ed31-4aae-bc66-5b4707f469f5","Type":"ContainerStarted","Data":"72a24b0fece53631f1a63b8cdc89f0e742eb7be8bd825f72f1fadb5769022639"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.760267 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" event={"ID":"95727575-c5bb-4eb1-9f36-29d2acc3f7ce","Type":"ContainerStarted","Data":"43e0917d790a0734851a89d6fe0569a6b8e94ee3fab06c4201807981c6e4c5c0"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.769832 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" event={"ID":"343731ce-b08c-47db-a6d1-16fa2278c711","Type":"ContainerStarted","Data":"e4361d2d44a6e477ea8f1631756c73bf886aa68d77c42586752399d8415e7887"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.772315 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" event={"ID":"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c","Type":"ContainerStarted","Data":"a58241e91441db63977fb10cacd402320baf1aa2f51357bdf2b2d428e5fe2373"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.787202 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.787697 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.287683182 +0000 UTC m=+209.577283125 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.812081 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" event={"ID":"4703b44a-9eae-47ce-83bf-e2b66d4b3d91","Type":"ContainerStarted","Data":"534a836298cc99ff3da08ef5a69e2705486ace72279b9bcc992ee4ee06cbb06c"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.823247 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" event={"ID":"aae7d681-bfbe-4280-92b9-f117157b6be8","Type":"ContainerStarted","Data":"c24dd2c5bd51337c68e7e009940179c89dd2dfbff81852a586d1e6f9b3134ece"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.848147 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-hbdxb" podStartSLOduration=160.848128271 podStartE2EDuration="2m40.848128271s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.846285837 +0000 UTC m=+209.135885790" watchObservedRunningTime="2026-03-20 13:26:02.848128271 +0000 UTC m=+209.137728214" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.862222 4690 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.870235 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" event={"ID":"7f3e5030-65a9-4876-a146-7087d00a33ba","Type":"ContainerStarted","Data":"22ca049fbec309ed5830e65b282e7d8989be5cdb6f6ffec538fbbb2e684361e6"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.877141 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5t8t9" event={"ID":"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1","Type":"ContainerStarted","Data":"83822791874bab492da3fc1e0b7e629f878553427f95fccbe23423c885df1f82"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.877189 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-5t8t9" event={"ID":"973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1","Type":"ContainerStarted","Data":"cfad6b060d527854cdfbeb83b13cb6a7038d114e1663c52360f9b3d6ef7a88f7"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.877301 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.888780 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 
13:26:02.889255 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.389237852 +0000 UTC m=+209.678837785 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.890149 4690 patch_prober.go:28] interesting pod/console-operator-58897d9998-5t8t9 container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.894347 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-5t8t9" podUID="973e1b1c-1dc2-46b8-a9da-e00a9f7b4da1" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.12:8443/readyz\": dial tcp 10.217.0.12:8443: connect: connection refused" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.894767 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.895061 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.395049333 +0000 UTC m=+209.684649276 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.900214 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-5t8t9" podStartSLOduration=160.900193411 podStartE2EDuration="2m40.900193411s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.897811068 +0000 UTC m=+209.187411011" watchObservedRunningTime="2026-03-20 13:26:02.900193411 +0000 UTC m=+209.189793354" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.903791 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" event={"ID":"617c74e7-0a16-4376-822f-390d3c44c7c5","Type":"ContainerStarted","Data":"e8648d2cf5042c3afe37038af79793247700fad78d315463827f7c345ee4acc7"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.907512 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" event={"ID":"4ae7b233-0f48-40ef-8952-f01e9cc27d1d","Type":"ContainerStarted","Data":"d9b182a2969fa508abfb252119d9989b811631462c4f342861f98acc4fc3ddea"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.909690 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" event={"ID":"a1ebb7a8-ac20-491f-b670-afb617d1e060","Type":"ContainerStarted","Data":"762c0882af2ee71b6d518d39eea4ddd36ef083023ece885ee237bfb3329a315e"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.913769 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" event={"ID":"501636dd-fcee-43af-84c5-56774cc6c48e","Type":"ContainerStarted","Data":"f78b3834131132dc3430e002fa11744604fdb3635797f0fc9eb5904ac30df58f"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.918634 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" event={"ID":"4c230027-5b5a-4c39-8594-c09c36112ab8","Type":"ContainerStarted","Data":"e1fac00be1083b49eb009d494a208e030e01cc199d216b156a0378f123ad1ce2"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.918677 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" event={"ID":"4c230027-5b5a-4c39-8594-c09c36112ab8","Type":"ContainerStarted","Data":"ea95d48df5144b7e4cd66051f646b5a6bebfb8ed194e60a790747ef87590552e"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.947569 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" event={"ID":"f4cdf5de-0994-48c7-967a-6271fdd5e023","Type":"ContainerStarted","Data":"1e42e3df45c5329fe8379a06c9282bc0a3d23892848c28629311953f9ed6683a"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.967159 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-hv779" event={"ID":"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349","Type":"ContainerStarted","Data":"26b47e42c349a3ef73f96dede20916846705d66ba18d24497727924b650450e2"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.969791 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" podStartSLOduration=160.969769166 podStartE2EDuration="2m40.969769166s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:02.96843654 +0000 UTC m=+209.258036493" watchObservedRunningTime="2026-03-20 13:26:02.969769166 +0000 UTC m=+209.259369109" Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.981623 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" event={"ID":"8f36c2e7-4cd4-4491-9cd9-824a6917db82","Type":"ContainerStarted","Data":"b92a95ec16ee20c480231b10d3b29e22bd4d91311fb76c430371262e9e8a2950"} Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.998725 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:02 crc kubenswrapper[4690]: E0320 13:26:02.998930 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.498911683 +0000 UTC m=+209.788511626 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:02 crc kubenswrapper[4690]: I0320 13:26:02.999168 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.000620 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.500611982 +0000 UTC m=+209.790211925 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.002082 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" event={"ID":"4694c58d-e630-4eff-a677-d13aca00fcab","Type":"ContainerStarted","Data":"6b7b4985fbc7aea0912961a61fada869e8914a506a341a4c4d0d24ef5627924c"} Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.008182 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" event={"ID":"6623fd0e-357f-459b-8e04-6da531bf7b7b","Type":"ContainerStarted","Data":"5ae3298c6621a3ebc6ea513d61a9fb4b1f9576670b43c3556623db6d0995b709"} Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.016959 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" event={"ID":"8f79ce18-3787-4106-882d-73271ab0018d","Type":"ContainerStarted","Data":"f729d7a28d2842a30a9719f8f441d438592d58770cff0a25338c1d8be333e5ee"} Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.018287 4690 patch_prober.go:28] interesting pod/downloads-7954f5f757-7vkfd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.018323 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7vkfd" podUID="4874f99f-2938-475f-872a-c7a794ae4818" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.026776 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" podStartSLOduration=161.026759426 podStartE2EDuration="2m41.026759426s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:03.026453035 +0000 UTC m=+209.316052988" watchObservedRunningTime="2026-03-20 13:26:03.026759426 +0000 UTC m=+209.316359369" Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.030189 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.099591 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.101120 4690 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.601100535 +0000 UTC m=+209.890700478 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.202880 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.203370 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.70335964 +0000 UTC m=+209.992959583 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.303625 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.303960 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.803945037 +0000 UTC m=+210.093544980 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.303988 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.304219 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.804213646 +0000 UTC m=+210.093813589 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.404572 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.405044 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:03.905025311 +0000 UTC m=+210.194625254 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.423778 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fdlk7"] Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.424561 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" podUID="5c8072fb-34b5-4c43-895b-9ccb724b9199" containerName="controller-manager" containerID="cri-o://b58f46ebf351c60d412a292be702091397054d895ee8ff16acb322fb00d89d2d" gracePeriod=30 Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.435075 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp"] Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.506289 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.506624 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.006612472 +0000 UTC m=+210.296212415 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.572080 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:03 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:03 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:03 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.572132 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.610106 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.610384 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.110368669 +0000 UTC m=+210.399968612 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.711955 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.712400 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.212389826 +0000 UTC m=+210.501989769 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.813249 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.813631 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.313611664 +0000 UTC m=+210.603211607 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:03 crc kubenswrapper[4690]: I0320 13:26:03.915680 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:03 crc kubenswrapper[4690]: E0320 13:26:03.916066 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.416050385 +0000 UTC m=+210.705650328 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.007609 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.016364 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.016744 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.516729965 +0000 UTC m=+210.806329908 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.058351 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" event={"ID":"6db1d803-f871-41d2-b6a7-0b3456af1ddf","Type":"ContainerStarted","Data":"e58ac6245d8ffd8446b904497c6144be8ec2d0e0eac0e9a0349c0718cc2a2875"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.062628 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" event={"ID":"4694c58d-e630-4eff-a677-d13aca00fcab","Type":"ContainerStarted","Data":"0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.063965 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.070425 4690 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-rj8zv container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" start-of-body= Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.070476 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" podUID="4694c58d-e630-4eff-a677-d13aca00fcab" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.8:6443/healthz\": dial tcp 10.217.0.8:6443: connect: connection refused" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.071556 4690 
generic.go:334] "Generic (PLEG): container finished" podID="5c8072fb-34b5-4c43-895b-9ccb724b9199" containerID="b58f46ebf351c60d412a292be702091397054d895ee8ff16acb322fb00d89d2d" exitCode=0 Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.071610 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" event={"ID":"5c8072fb-34b5-4c43-895b-9ccb724b9199","Type":"ContainerDied","Data":"b58f46ebf351c60d412a292be702091397054d895ee8ff16acb322fb00d89d2d"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.105712 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-hv779" event={"ID":"3bc6fdb3-ebc0-4d1f-b004-8e54086dc349","Type":"ContainerStarted","Data":"c6678f4ef2ccf747aa2409c8f6996ec82076b9658b13177e470ac7b703b01dfb"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.115888 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" event={"ID":"7f3e5030-65a9-4876-a146-7087d00a33ba","Type":"ContainerStarted","Data":"f8f2dc8d6995871562020a01cd5f190ea99d8f2cf44625d3bce5c0ecfb451576"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.118634 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.120247 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.620234953 +0000 UTC m=+210.909834896 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.122663 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" podStartSLOduration=163.122648607 podStartE2EDuration="2m43.122648607s" podCreationTimestamp="2026-03-20 13:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.095487798 +0000 UTC m=+210.385087751" watchObservedRunningTime="2026-03-20 13:26:04.122648607 +0000 UTC m=+210.412248550" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.126488 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" event={"ID":"93bf028e-dcbf-4438-bc7a-2003c9f2e88a","Type":"ContainerStarted","Data":"d283ff5fedd57ee798abf5d60aa3dd6f639b02241857c46a1ac6d99eed53636a"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.142628 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-l547r" event={"ID":"a980fba9-5b4e-4042-9e79-b816bac0bc19","Type":"ContainerStarted","Data":"03fb589bba0c49c1b74744e526de43430a6ec94cae4ce8f05c2e54569dfa26cb"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.160314 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.169734 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-hv779" podStartSLOduration=162.169712604 podStartE2EDuration="2m42.169712604s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.121622561 +0000 UTC m=+210.411222514" watchObservedRunningTime="2026-03-20 13:26:04.169712604 +0000 UTC m=+210.459312547" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.170156 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-l547r" podStartSLOduration=7.170150969 podStartE2EDuration="7.170150969s" podCreationTimestamp="2026-03-20 13:25:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.16932163 +0000 UTC m=+210.458921583" watchObservedRunningTime="2026-03-20 13:26:04.170150969 +0000 UTC m=+210.459750912" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.187106 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" event={"ID":"8f36c2e7-4cd4-4491-9cd9-824a6917db82","Type":"ContainerStarted","Data":"661de6db1d9a546264206ac049439ddf52114daa521f769c455bc725a79dd783"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.187163 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.192463 4690 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-vks5p container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.192515 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" podUID="8f36c2e7-4cd4-4491-9cd9-824a6917db82" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.223369 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-client-ca\") pod \"5c8072fb-34b5-4c43-895b-9ccb724b9199\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.223445 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c8072fb-34b5-4c43-895b-9ccb724b9199-serving-cert\") pod \"5c8072fb-34b5-4c43-895b-9ccb724b9199\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.223577 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.223623 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-config\") pod \"5c8072fb-34b5-4c43-895b-9ccb724b9199\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.223708 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-proxy-ca-bundles\") pod \"5c8072fb-34b5-4c43-895b-9ccb724b9199\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.223737 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hhwgx\" (UniqueName: \"kubernetes.io/projected/5c8072fb-34b5-4c43-895b-9ccb724b9199-kube-api-access-hhwgx\") pod \"5c8072fb-34b5-4c43-895b-9ccb724b9199\" (UID: \"5c8072fb-34b5-4c43-895b-9ccb724b9199\") " Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.224031 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.72400609 +0000 UTC m=+211.013606033 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.224180 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" event={"ID":"27c73b63-ed31-4aae-bc66-5b4707f469f5","Type":"ContainerStarted","Data":"f12832a6d1723fa77a043885e59848e3381f72f7ce35a7db9d0164c3338c09e4"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.224226 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.225207 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.725190831 +0000 UTC m=+211.014790874 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.225322 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-client-ca" (OuterVolumeSpecName: "client-ca") pod "5c8072fb-34b5-4c43-895b-9ccb724b9199" (UID: "5c8072fb-34b5-4c43-895b-9ccb724b9199"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.228625 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5c8072fb-34b5-4c43-895b-9ccb724b9199" (UID: "5c8072fb-34b5-4c43-895b-9ccb724b9199"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.234105 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c8072fb-34b5-4c43-895b-9ccb724b9199-kube-api-access-hhwgx" (OuterVolumeSpecName: "kube-api-access-hhwgx") pod "5c8072fb-34b5-4c43-895b-9ccb724b9199" (UID: "5c8072fb-34b5-4c43-895b-9ccb724b9199"). InnerVolumeSpecName "kube-api-access-hhwgx". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.237975 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c8072fb-34b5-4c43-895b-9ccb724b9199-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5c8072fb-34b5-4c43-895b-9ccb724b9199" (UID: "5c8072fb-34b5-4c43-895b-9ccb724b9199"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.239341 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-config" (OuterVolumeSpecName: "config") pod "5c8072fb-34b5-4c43-895b-9ccb724b9199" (UID: "5c8072fb-34b5-4c43-895b-9ccb724b9199"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.255126 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" event={"ID":"5ae042df-e0db-4bd4-b519-22c2ab7ac732","Type":"ContainerStarted","Data":"d87517304a6f9727d49cc7e0cc284d7bce3078172eb0429dde3a64aa86fb5d25"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.255170 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" event={"ID":"5ae042df-e0db-4bd4-b519-22c2ab7ac732","Type":"ContainerStarted","Data":"0d78cf56acd5b9dee9d9a6b9f7399f2c066011a86a5708bcc02b3486aeb4e1cf"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.265353 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" podStartSLOduration=162.265338019 podStartE2EDuration="2m42.265338019s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.227055626 +0000 UTC m=+210.516655569" watchObservedRunningTime="2026-03-20 13:26:04.265338019 +0000 UTC m=+210.554937962" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.266933 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" event={"ID":"53349f20-095b-4c88-b827-f3d6d09c15fc","Type":"ContainerStarted","Data":"a2ee321c4f4f1bbfce13b2edc5de45a201a7b7bc8f2b7794b26b12d75cf68200"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.266996 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" event={"ID":"53349f20-095b-4c88-b827-f3d6d09c15fc","Type":"ContainerStarted","Data":"fa0741a966f1cb6e45ad1cc2a7b1fc4b4a336883e1ed4d97ac58eb9724300aa5"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.278208 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" event={"ID":"95727575-c5bb-4eb1-9f36-29d2acc3f7ce","Type":"ContainerStarted","Data":"d527349723a3b26fe4e9a9f867e51877306981012628af9db78685475e0b856b"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.281367 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" event={"ID":"ec3e325e-1789-4918-9ebf-dabed8ba1408","Type":"ContainerStarted","Data":"616dbdce7264afb0f280b604161bf195a9fd1b7cb8b85198c979f0e5224c5a9a"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.299225 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" event={"ID":"e26c3b04-aca8-46b5-a653-8257d4458d5f","Type":"ContainerStarted","Data":"35890efcd6a86ba1ec25847f4c21047a9be133702271a1689830b9579471b135"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.301015 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" podStartSLOduration=162.300998422 podStartE2EDuration="2m42.300998422s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-03-20 13:26:04.299762409 +0000 UTC m=+210.589362352" watchObservedRunningTime="2026-03-20 13:26:04.300998422 +0000 UTC m=+210.590598365" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.302012 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-8gsqv" podStartSLOduration=162.302005986 podStartE2EDuration="2m42.302005986s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.266030373 +0000 UTC m=+210.555630316" watchObservedRunningTime="2026-03-20 13:26:04.302005986 +0000 UTC m=+210.591605929" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.309420 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" event={"ID":"343731ce-b08c-47db-a6d1-16fa2278c711","Type":"ContainerStarted","Data":"f66c40ff91a5b813b2ab5b98ffbdac1853f5b027bbad8875f853b430bcb5e0e6"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.327022 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.327656 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.327686 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.327698 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hhwgx\" (UniqueName: \"kubernetes.io/projected/5c8072fb-34b5-4c43-895b-9ccb724b9199-kube-api-access-hhwgx\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.327709 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5c8072fb-34b5-4c43-895b-9ccb724b9199-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.327720 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5c8072fb-34b5-4c43-895b-9ccb724b9199-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.328062 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.828042106 +0000 UTC m=+211.117642049 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.329299 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-pd6xp" podStartSLOduration=162.329281249 podStartE2EDuration="2m42.329281249s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.326800743 +0000 UTC m=+210.616400686" watchObservedRunningTime="2026-03-20 13:26:04.329281249 +0000 UTC m=+210.618881192" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.335513 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" event={"ID":"8f79ce18-3787-4106-882d-73271ab0018d","Type":"ContainerStarted","Data":"3c1b5cac0c12b3e76bc96269039f0c8c9add28d617c9c34094b761f0d780f910"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.344236 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-69ksw" event={"ID":"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7","Type":"ContainerStarted","Data":"3d0ebcad01a64ba4ce152b2fccfbe22b5dbd00c2bc662c0ded7e188dac518af1"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.365265 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" event={"ID":"501636dd-fcee-43af-84c5-56774cc6c48e","Type":"ContainerStarted","Data":"fe1349b57753d7133e88a9d44a2b3654a7ba229d9788b3913f7fbb4fc06b9210"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.414619 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" event={"ID":"068d0d7d-4bf9-4019-87da-451c2554d6d3","Type":"ContainerStarted","Data":"a1b5b0094f828616ae38869311f79149cededf92d8e2e5c66b7f17a5541b4d21"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.414669 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" event={"ID":"068d0d7d-4bf9-4019-87da-451c2554d6d3","Type":"ContainerStarted","Data":"d3bbd59e629330f241bc791f7cc7f890b539d712bce3d78bcc734d3bf7a5130e"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.419983 4690 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-vzblt container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" start-of-body= Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.420046 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" podUID="068d0d7d-4bf9-4019-87da-451c2554d6d3" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.32:5443/healthz\": dial tcp 10.217.0.32:5443: connect: connection refused" Mar 20 
13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.444412 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" event={"ID":"4ae7b233-0f48-40ef-8952-f01e9cc27d1d","Type":"ContainerStarted","Data":"d2d04df870affd2b6e94ecf30da18c1c1000d4f7c0b94f2fb8088f0db0f2dfa0"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.444460 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.446049 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.448465 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:04.948451288 +0000 UTC m=+211.238051231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.471933 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" podStartSLOduration=162.471916409 podStartE2EDuration="2m42.471916409s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.471213034 +0000 UTC m=+210.760812977" watchObservedRunningTime="2026-03-20 13:26:04.471916409 +0000 UTC m=+210.761516352" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.472913 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-st6cj" podStartSLOduration=162.472906573 podStartE2EDuration="2m42.472906573s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.350496763 +0000 UTC m=+210.640096716" watchObservedRunningTime="2026-03-20 13:26:04.472906573 +0000 UTC m=+210.762506516" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.494492 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-tqft4" event={"ID":"0b631f25-1a33-4d3f-9cd6-c932781e8c8b","Type":"ContainerStarted","Data":"db24a7c304c17decbb1143a5cff1723aea352c19000068d1d14259bb0a0fba1a"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.513063 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-2xhf5" 
event={"ID":"6623fd0e-357f-459b-8e04-6da531bf7b7b","Type":"ContainerStarted","Data":"f035ad88caebbc758d840d6eaf4607045a4fedd30689b9ff740ac6cd1cc29da6"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.529702 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-gftvr" podStartSLOduration=162.529686686 podStartE2EDuration="2m42.529686686s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.527266162 +0000 UTC m=+210.816866105" watchObservedRunningTime="2026-03-20 13:26:04.529686686 +0000 UTC m=+210.819286629" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.556122 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.557721 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.057699744 +0000 UTC m=+211.347299697 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.565980 4690 generic.go:334] "Generic (PLEG): container finished" podID="aae7d681-bfbe-4280-92b9-f117157b6be8" containerID="bda6bdbc4d7241691cbf0edeecd8b30185950ee6e9824593c7a9ed54c95eb440" exitCode=0 Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.566301 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" event={"ID":"aae7d681-bfbe-4280-92b9-f117157b6be8","Type":"ContainerDied","Data":"bda6bdbc4d7241691cbf0edeecd8b30185950ee6e9824593c7a9ed54c95eb440"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.575942 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:04 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:04 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:04 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.578960 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.585316 4690 generic.go:334] "Generic (PLEG): container 
finished" podID="4c230027-5b5a-4c39-8594-c09c36112ab8" containerID="e1fac00be1083b49eb009d494a208e030e01cc199d216b156a0378f123ad1ce2" exitCode=0 Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.588955 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" event={"ID":"4c230027-5b5a-4c39-8594-c09c36112ab8","Type":"ContainerDied","Data":"e1fac00be1083b49eb009d494a208e030e01cc199d216b156a0378f123ad1ce2"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.589010 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.592217 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" podStartSLOduration=162.592199196 podStartE2EDuration="2m42.592199196s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.591269254 +0000 UTC m=+210.880869197" watchObservedRunningTime="2026-03-20 13:26:04.592199196 +0000 UTC m=+210.881799139" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.612363 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" event={"ID":"f4cdf5de-0994-48c7-967a-6271fdd5e023","Type":"ContainerStarted","Data":"10eb8013bef592e5c3abe231f0e4065602a291e9a817dbe61514c91fc7480bea"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.615488 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" event={"ID":"617c74e7-0a16-4376-822f-390d3c44c7c5","Type":"ContainerStarted","Data":"4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.616601 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.628481 4690 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5mwrz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.628720 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.647987 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-fcl8b" podStartSLOduration=162.647969404 podStartE2EDuration="2m42.647969404s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.645485488 +0000 UTC m=+210.935085431" watchObservedRunningTime="2026-03-20 13:26:04.647969404 +0000 UTC m=+210.937569357" Mar 20 13:26:04 crc 
kubenswrapper[4690]: I0320 13:26:04.657170 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" event={"ID":"248801a4-0271-4a76-93eb-efea07c28a24","Type":"ContainerStarted","Data":"9d5f4981b80e558885e15ee52620dab81b1332636a01b76ade51d6ea76fea721"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.657912 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.658321 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.158306802 +0000 UTC m=+211.447906745 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.661278 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.661645 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.682436 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" event={"ID":"b50351c5-e9c9-4ab8-b7c2-e2fa3c0beb1c","Type":"ContainerStarted","Data":"b54ca09f295230be6e2e7d115099c5726cd14619e18b7764196decdbb0ecb41b"} Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.700566 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-5t8t9" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.738009 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.758615 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.759952 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.259932844 +0000 UTC m=+211.549532787 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.769153 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" podStartSLOduration=162.769134942 podStartE2EDuration="2m42.769134942s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.768420018 +0000 UTC m=+211.058019961" watchObservedRunningTime="2026-03-20 13:26:04.769134942 +0000 UTC m=+211.058734885" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.862093 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.862408 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.362396266 +0000 UTC m=+211.651996199 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.963418 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:04 crc kubenswrapper[4690]: E0320 13:26:04.963719 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.463702168 +0000 UTC m=+211.753302111 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.990340 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-dv9wc" podStartSLOduration=162.990320928 podStartE2EDuration="2m42.990320928s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.965608904 +0000 UTC m=+211.255208847" watchObservedRunningTime="2026-03-20 13:26:04.990320928 +0000 UTC m=+211.279920871" Mar 20 13:26:04 crc kubenswrapper[4690]: I0320 13:26:04.991553 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" podStartSLOduration=162.99154556 podStartE2EDuration="2m42.99154556s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:04.990133122 +0000 UTC m=+211.279733065" watchObservedRunningTime="2026-03-20 13:26:04.99154556 +0000 UTC m=+211.281145503" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.065999 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.066375 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.566362067 +0000 UTC m=+211.855962010 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.142971 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-pd59k" podStartSLOduration=163.142945794 podStartE2EDuration="2m43.142945794s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:05.10321357 +0000 UTC m=+211.392813513" watchObservedRunningTime="2026-03-20 13:26:05.142945794 +0000 UTC m=+211.432545747" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.166921 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.167349 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.667331287 +0000 UTC m=+211.956931230 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.208702 4690 ???:1] "http: TLS handshake error from 192.168.126.11:42208: no serving certificate available for the kubelet" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.268038 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.268460 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.768445742 +0000 UTC m=+212.058045685 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.295635 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7d798ff474-dkkqx"] Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.295858 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c8072fb-34b5-4c43-895b-9ccb724b9199" containerName="controller-manager" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.295872 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c8072fb-34b5-4c43-895b-9ccb724b9199" containerName="controller-manager" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.295966 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c8072fb-34b5-4c43-895b-9ccb724b9199" containerName="controller-manager" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.296313 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.300933 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7d798ff474-dkkqx"] Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.368361 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.368575 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-client-ca\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.368630 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-config\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.368707 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-serving-cert\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.368749 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-proxy-ca-bundles\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.368770 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8blgj\" (UniqueName: \"kubernetes.io/projected/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-kube-api-access-8blgj\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.368919 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.868899504 +0000 UTC m=+212.158499457 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.470188 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-client-ca\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.471267 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-config\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.471312 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.471397 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-serving-cert\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.471448 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-proxy-ca-bundles\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: 
\"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.471476 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8blgj\" (UniqueName: \"kubernetes.io/projected/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-kube-api-access-8blgj\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.471199 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-client-ca\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.472974 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-config\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.477011 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-proxy-ca-bundles\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.477284 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:05.97725762 +0000 UTC m=+212.266857563 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.480337 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-serving-cert\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.510739 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8blgj\" (UniqueName: \"kubernetes.io/projected/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-kube-api-access-8blgj\") pod \"controller-manager-7d798ff474-dkkqx\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.568211 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:05 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:05 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:05 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.568590 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.577297 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.577683 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.077666861 +0000 UTC m=+212.367266804 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.643262 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.678645 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.679096 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.179084996 +0000 UTC m=+212.468684939 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.702270 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.702348 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-fdlk7" event={"ID":"5c8072fb-34b5-4c43-895b-9ccb724b9199","Type":"ContainerDied","Data":"924f96e428f8c8e0d81f9a458c48d52a832d10aee0bf7a70f5ff11f75527c6e1"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.702396 4690 scope.go:117] "RemoveContainer" containerID="b58f46ebf351c60d412a292be702091397054d895ee8ff16acb322fb00d89d2d" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.710754 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" event={"ID":"7f3e5030-65a9-4876-a146-7087d00a33ba","Type":"ContainerStarted","Data":"2e5ea5ca2259ab2d3237bbf7d74e1b1592fb5ec89d02646706d5862414633126"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.712032 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.722423 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" event={"ID":"248801a4-0271-4a76-93eb-efea07c28a24","Type":"ContainerStarted","Data":"6573b8921443e4c1ae080372fe5ab09ecf66cf086e46fd880036809562a9b25e"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.722470 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" event={"ID":"248801a4-0271-4a76-93eb-efea07c28a24","Type":"ContainerStarted","Data":"358cd2f7684fe5d56ac9e808c72563f870fb7c4ef802fbc18cec17801a4c3b35"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.728478 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" 
event={"ID":"ec3e325e-1789-4918-9ebf-dabed8ba1408","Type":"ContainerStarted","Data":"39ae2ebc56ed3b105260c61c1ca2e9ddc330031e7627d1aafe56f92e7bdfd007"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.728520 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" event={"ID":"ec3e325e-1789-4918-9ebf-dabed8ba1408","Type":"ContainerStarted","Data":"82a8e5b530005d2d7693e6054236657fd7653b2d924593644d5bd0fdfe21ba96"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.736924 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" event={"ID":"aae7d681-bfbe-4280-92b9-f117157b6be8","Type":"ContainerStarted","Data":"1d615cd6beb00f2f54d2e4bee0bba009650c9f02228cee708d754c8f0f838c6d"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.740071 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-69ksw" event={"ID":"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7","Type":"ContainerStarted","Data":"5ee9994f3ded807ece45fd5df097481f7d2322cc7adf5bc5cdbbb140466e10de"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.743391 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" event={"ID":"5ae042df-e0db-4bd4-b519-22c2ab7ac732","Type":"ContainerStarted","Data":"3459ab6585125231f94ed339bf9970fc6b5f333aa6017b0053d1933708364efd"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.746168 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" event={"ID":"93bf028e-dcbf-4438-bc7a-2003c9f2e88a","Type":"ContainerStarted","Data":"c6c6698634ad0d01e4d70917ed94fce2fbf40a025a20b64efb040fca0acb603b"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.747103 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.749341 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" podStartSLOduration=163.749321544 podStartE2EDuration="2m43.749321544s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:05.731597441 +0000 UTC m=+212.021197394" watchObservedRunningTime="2026-03-20 13:26:05.749321544 +0000 UTC m=+212.038921487" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.749614 4690 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-hxnj4 container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body= Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.749655 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" podUID="93bf028e-dcbf-4438-bc7a-2003c9f2e88a" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.754399 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ingress-canary/ingress-canary-l547r" event={"ID":"a980fba9-5b4e-4042-9e79-b816bac0bc19","Type":"ContainerStarted","Data":"7e8ddcd9131cff5e3634d8e48f6b8fc2162eb5780204ae6612fb96ecb80aa162"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.756613 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-st5qs" event={"ID":"8f79ce18-3787-4106-882d-73271ab0018d","Type":"ContainerStarted","Data":"e7fc2f04645032cdbc8b7e83cd50735fe251d8dd374492999dd0cef8cfa11376"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.775700 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" event={"ID":"4c230027-5b5a-4c39-8594-c09c36112ab8","Type":"ContainerStarted","Data":"c58b9395c472accb61cadff5bd0f6d842b740a6f2f3db431e2c38b7111c4bb61"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.781354 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.781664 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.281644661 +0000 UTC m=+212.571244604 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.789709 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-g69d4" podStartSLOduration=163.78969288 podStartE2EDuration="2m43.78969288s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:05.772255227 +0000 UTC m=+212.061855170" watchObservedRunningTime="2026-03-20 13:26:05.78969288 +0000 UTC m=+212.079292823" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.790581 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fdlk7"] Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.794034 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-tqft4" event={"ID":"0b631f25-1a33-4d3f-9cd6-c932781e8c8b","Type":"ContainerStarted","Data":"0d0e75df6d2eee612c01fc386f2babec796cddd479da1e96527dd606488fb16d"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.794066 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-tqft4" event={"ID":"0b631f25-1a33-4d3f-9cd6-c932781e8c8b","Type":"ContainerStarted","Data":"d393bc4319329671702bc90c1b5b90e98036a6a0be90fd490453238449f7e174"} Mar 20 13:26:05 crc 
kubenswrapper[4690]: I0320 13:26:05.794179 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-tqft4" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.796225 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-fdlk7"] Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.816274 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" event={"ID":"e26c3b04-aca8-46b5-a653-8257d4458d5f","Type":"ContainerStarted","Data":"e523b58ff84cc79fe71da8ec0c2710fd9544baada4ad26ef4700ac65249bdcf2"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.816323 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" event={"ID":"e26c3b04-aca8-46b5-a653-8257d4458d5f","Type":"ContainerStarted","Data":"3b9cd6a1c241a72fa3d7f5649a998be6db53e2a4d18ab63b0949b8bd585ea245"} Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.819396 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" podUID="345c7db2-4067-402c-bddf-3a497a9540c2" containerName="route-controller-manager" containerID="cri-o://0c931b3ea66caaa0c191519844ad432f09f6eef14007624ede266966732d2ba0" gracePeriod=30 Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.824872 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-jrwdq" podStartSLOduration=163.824837965 podStartE2EDuration="2m43.824837965s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:05.822052228 +0000 UTC m=+212.111652191" watchObservedRunningTime="2026-03-20 13:26:05.824837965 +0000 UTC m=+212.114437908" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.828000 4690 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5mwrz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.828059 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.834101 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-vks5p" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.837572 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-m8bdh" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.839479 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.874167 4690 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" podStartSLOduration=163.874143639 podStartE2EDuration="2m43.874143639s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:05.86838425 +0000 UTC m=+212.157984193" watchObservedRunningTime="2026-03-20 13:26:05.874143639 +0000 UTC m=+212.163743582" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.889952 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.891505 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.391493019 +0000 UTC m=+212.681092962 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.905836 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-brvzm" podStartSLOduration=163.905816164 podStartE2EDuration="2m43.905816164s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:05.902032173 +0000 UTC m=+212.191632116" watchObservedRunningTime="2026-03-20 13:26:05.905816164 +0000 UTC m=+212.195416107" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.923390 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-tqft4" podStartSLOduration=8.9233688 podStartE2EDuration="8.9233688s" podCreationTimestamp="2026-03-20 13:25:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:05.922709588 +0000 UTC m=+212.212309531" watchObservedRunningTime="2026-03-20 13:26:05.9233688 +0000 UTC m=+212.212968743" Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.977340 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-xfzzm" podStartSLOduration=163.977324095 podStartE2EDuration="2m43.977324095s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:05.97515627 +0000 UTC m=+212.264756223" watchObservedRunningTime="2026-03-20 13:26:05.977324095 +0000 UTC m=+212.266924038" Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 
13:26:05.993601 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.493572307 +0000 UTC m=+212.783172250 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.991733 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:05 crc kubenswrapper[4690]: I0320 13:26:05.999094 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:05 crc kubenswrapper[4690]: E0320 13:26:05.999603 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.499587695 +0000 UTC m=+212.789187638 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.020765 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" podStartSLOduration=165.020742986 podStartE2EDuration="2m45.020742986s" podCreationTimestamp="2026-03-20 13:23:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:06.01767388 +0000 UTC m=+212.307273833" watchObservedRunningTime="2026-03-20 13:26:06.020742986 +0000 UTC m=+212.310342919" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.089374 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7d798ff474-dkkqx"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.104952 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.105360 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.605341661 +0000 UTC m=+212.894941604 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.206762 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.207083 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.707070667 +0000 UTC m=+212.996670610 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.254531 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4rnwq"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.256679 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.260598 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.285667 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-vzblt" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.287071 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4rnwq"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.307400 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.307676 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-utilities\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.307697 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-catalog-content\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.307714 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.807695885 +0000 UTC m=+213.097295828 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.307758 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7qh7\" (UniqueName: \"kubernetes.io/projected/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-kube-api-access-l7qh7\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.414761 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.414839 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-utilities\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.414880 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-catalog-content\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.414945 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7qh7\" (UniqueName: \"kubernetes.io/projected/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-kube-api-access-l7qh7\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.415547 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:06.915534143 +0000 UTC m=+213.205134086 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.416009 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-utilities\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.416272 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-catalog-content\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.435482 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c8072fb-34b5-4c43-895b-9ccb724b9199" path="/var/lib/kubelet/pods/5c8072fb-34b5-4c43-895b-9ccb724b9199/volumes" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.450884 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7qh7\" (UniqueName: \"kubernetes.io/projected/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-kube-api-access-l7qh7\") pod \"community-operators-4rnwq\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.454031 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2485h"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.455165 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.466920 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.471615 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2485h"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.515978 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.516392 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hppjq\" (UniqueName: \"kubernetes.io/projected/62e83612-6289-48a8-a3bb-4488048279f7-kube-api-access-hppjq\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.516429 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-utilities\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.516492 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-catalog-content\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.516584 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.016567885 +0000 UTC m=+213.306167828 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.572609 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:06 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:06 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:06 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.572656 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.591340 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.618247 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.618309 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hppjq\" (UniqueName: \"kubernetes.io/projected/62e83612-6289-48a8-a3bb-4488048279f7-kube-api-access-hppjq\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.618339 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-utilities\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.618401 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-catalog-content\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.618778 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-catalog-content\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 
13:26:06.619026 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-utilities\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.619147 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.119136941 +0000 UTC m=+213.408736884 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.630072 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vwvxj"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.631077 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.644580 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hppjq\" (UniqueName: \"kubernetes.io/projected/62e83612-6289-48a8-a3bb-4488048279f7-kube-api-access-hppjq\") pod \"certified-operators-2485h\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.660006 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vwvxj"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.719202 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.719446 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-catalog-content\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.719467 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46fr2\" (UniqueName: \"kubernetes.io/projected/b8184a4a-79e5-491e-8e56-ebf0bea4601f-kube-api-access-46fr2\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.719524 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-utilities\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.719626 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.219610874 +0000 UTC m=+213.509210817 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.821091 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-utilities\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.822064 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.821985 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-utilities\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.822512 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.3224986 +0000 UTC m=+213.612098543 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.822881 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-catalog-content\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.822901 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46fr2\" (UniqueName: \"kubernetes.io/projected/b8184a4a-79e5-491e-8e56-ebf0bea4601f-kube-api-access-46fr2\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.823170 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-catalog-content\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.823761 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-b8fwc"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.824055 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.825090 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.851283 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" event={"ID":"aae7d681-bfbe-4280-92b9-f117157b6be8","Type":"ContainerStarted","Data":"43c4d9af2762afaa0f424398bfbf765f37e1d73b11d21a23cb3de69873f1dfe8"} Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.851916 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46fr2\" (UniqueName: \"kubernetes.io/projected/b8184a4a-79e5-491e-8e56-ebf0bea4601f-kube-api-access-46fr2\") pod \"community-operators-vwvxj\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.859062 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" event={"ID":"238466ff-44c9-4e64-9aa3-2f9d2cae17cf","Type":"ContainerStarted","Data":"14b5ed630f3f8548e5abd160a7df429f0b1d4f619f36081b0bcab8752c88d415"} Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.859114 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" event={"ID":"238466ff-44c9-4e64-9aa3-2f9d2cae17cf","Type":"ContainerStarted","Data":"75442c2764c2303a2a11fa85334f30e9a23920f82247e1a1c1c4f9676225c59f"} Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.860106 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.867640 4690 generic.go:334] "Generic (PLEG): container finished" podID="345c7db2-4067-402c-bddf-3a497a9540c2" containerID="0c931b3ea66caaa0c191519844ad432f09f6eef14007624ede266966732d2ba0" exitCode=0 Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.867695 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" event={"ID":"345c7db2-4067-402c-bddf-3a497a9540c2","Type":"ContainerDied","Data":"0c931b3ea66caaa0c191519844ad432f09f6eef14007624ede266966732d2ba0"} Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.875436 4690 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-5mwrz container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.875471 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.875673 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b8fwc"] Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.894222 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.913296 4690 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" podStartSLOduration=3.9132821780000002 podStartE2EDuration="3.913282178s" podCreationTimestamp="2026-03-20 13:26:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:06.910374968 +0000 UTC m=+213.199974911" watchObservedRunningTime="2026-03-20 13:26:06.913282178 +0000 UTC m=+213.202882121" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.924931 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.925137 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-catalog-content\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.927106 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-hxnj4" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.932000 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.431938473 +0000 UTC m=+213.721538426 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.932986 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.933207 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zch7\" (UniqueName: \"kubernetes.io/projected/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-kube-api-access-2zch7\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:06 crc kubenswrapper[4690]: I0320 13:26:06.933295 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-utilities\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:06 crc kubenswrapper[4690]: E0320 13:26:06.949416 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.449390266 +0000 UTC m=+213.738990279 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.015120 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.034328 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.034595 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zch7\" (UniqueName: \"kubernetes.io/projected/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-kube-api-access-2zch7\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.034627 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-utilities\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.034670 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-catalog-content\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.035129 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-catalog-content\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.035200 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.535186102 +0000 UTC m=+213.824786045 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.035607 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-utilities\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.064557 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zch7\" (UniqueName: \"kubernetes.io/projected/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-kube-api-access-2zch7\") pod \"certified-operators-b8fwc\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.092170 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.143302 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stm9g\" (UniqueName: \"kubernetes.io/projected/345c7db2-4067-402c-bddf-3a497a9540c2-kube-api-access-stm9g\") pod \"345c7db2-4067-402c-bddf-3a497a9540c2\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.143530 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-config\") pod \"345c7db2-4067-402c-bddf-3a497a9540c2\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.143570 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-client-ca\") pod \"345c7db2-4067-402c-bddf-3a497a9540c2\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.143593 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/345c7db2-4067-402c-bddf-3a497a9540c2-serving-cert\") pod \"345c7db2-4067-402c-bddf-3a497a9540c2\" (UID: \"345c7db2-4067-402c-bddf-3a497a9540c2\") " Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.143769 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.144109 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2026-03-20 13:26:07.644097537 +0000 UTC m=+213.933697480 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.144716 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-client-ca" (OuterVolumeSpecName: "client-ca") pod "345c7db2-4067-402c-bddf-3a497a9540c2" (UID: "345c7db2-4067-402c-bddf-3a497a9540c2"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.144831 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-config" (OuterVolumeSpecName: "config") pod "345c7db2-4067-402c-bddf-3a497a9540c2" (UID: "345c7db2-4067-402c-bddf-3a497a9540c2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.150426 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/345c7db2-4067-402c-bddf-3a497a9540c2-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "345c7db2-4067-402c-bddf-3a497a9540c2" (UID: "345c7db2-4067-402c-bddf-3a497a9540c2"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.157223 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/345c7db2-4067-402c-bddf-3a497a9540c2-kube-api-access-stm9g" (OuterVolumeSpecName: "kube-api-access-stm9g") pod "345c7db2-4067-402c-bddf-3a497a9540c2" (UID: "345c7db2-4067-402c-bddf-3a497a9540c2"). InnerVolumeSpecName "kube-api-access-stm9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.169766 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4rnwq"] Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.199136 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.244424 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.244647 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.744619801 +0000 UTC m=+214.034219744 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.244729 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.244802 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stm9g\" (UniqueName: \"kubernetes.io/projected/345c7db2-4067-402c-bddf-3a497a9540c2-kube-api-access-stm9g\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.244815 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.244824 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/345c7db2-4067-402c-bddf-3a497a9540c2-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.244831 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/345c7db2-4067-402c-bddf-3a497a9540c2-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.245128 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.745116039 +0000 UTC m=+214.034715982 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.290421 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg"] Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.290931 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="345c7db2-4067-402c-bddf-3a497a9540c2" containerName="route-controller-manager" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.290942 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="345c7db2-4067-402c-bddf-3a497a9540c2" containerName="route-controller-manager" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.291032 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="345c7db2-4067-402c-bddf-3a497a9540c2" containerName="route-controller-manager" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.292438 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.314239 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg"] Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.346725 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.347082 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8sm7\" (UniqueName: \"kubernetes.io/projected/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-kube-api-access-l8sm7\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.347271 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.847250929 +0000 UTC m=+214.136850882 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.347337 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-client-ca\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.347404 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.347431 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-config\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.347514 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-serving-cert\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.347981 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.847970624 +0000 UTC m=+214.137570567 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.448984 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.449291 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-config\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.449388 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-serving-cert\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.449454 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:07.949419321 +0000 UTC m=+214.239019294 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.449497 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8sm7\" (UniqueName: \"kubernetes.io/projected/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-kube-api-access-l8sm7\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.449702 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-client-ca\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.450714 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-client-ca\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.451669 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-config\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.458785 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-serving-cert\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.476328 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8sm7\" (UniqueName: \"kubernetes.io/projected/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-kube-api-access-l8sm7\") pod \"route-controller-manager-588bd79dbd-fgspg\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.554511 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 
13:26:07.554899 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.054885346 +0000 UTC m=+214.344485289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.564706 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:07 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:07 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:07 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.564748 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.577213 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2485h"] Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.645169 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.666460 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.666747 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.166732872 +0000 UTC m=+214.456332815 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.768577 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.769406 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.269393801 +0000 UTC m=+214.558993744 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.820897 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vwvxj"] Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.869489 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.873823 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.373775189 +0000 UTC m=+214.663375132 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.883638 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2485h" event={"ID":"62e83612-6289-48a8-a3bb-4488048279f7","Type":"ContainerStarted","Data":"001bed812baae8e53f7fa68d4f8c93a78f36eef624dfb13c115d32bb0a62b0f4"} Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.887302 4690 generic.go:334] "Generic (PLEG): container finished" podID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerID="23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76" exitCode=0 Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.887458 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rnwq" event={"ID":"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6","Type":"ContainerDied","Data":"23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76"} Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.887508 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rnwq" event={"ID":"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6","Type":"ContainerStarted","Data":"a808e10e40f08713ff7e100081d503aa1f94b80ff4b5d0d2714b5918bba76780"} Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.919821 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-69ksw" event={"ID":"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7","Type":"ContainerStarted","Data":"541e623896b54b58a77c1186573b0c6d7772dac28256cd82aca696a184fcd99f"} Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.928036 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvxj" event={"ID":"b8184a4a-79e5-491e-8e56-ebf0bea4601f","Type":"ContainerStarted","Data":"1a23811a389da7dc02bd4e79b357d5b931f5443405fb23c1939da945a1c51e96"} Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.934051 4690 generic.go:334] "Generic (PLEG): container finished" podID="53349f20-095b-4c88-b827-f3d6d09c15fc" containerID="a2ee321c4f4f1bbfce13b2edc5de45a201a7b7bc8f2b7794b26b12d75cf68200" exitCode=0 Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.934114 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" event={"ID":"53349f20-095b-4c88-b827-f3d6d09c15fc","Type":"ContainerDied","Data":"a2ee321c4f4f1bbfce13b2edc5de45a201a7b7bc8f2b7794b26b12d75cf68200"} Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.942750 4690 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.960902 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.962466 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp" event={"ID":"345c7db2-4067-402c-bddf-3a497a9540c2","Type":"ContainerDied","Data":"0370f1b3f4825a008b102701497deff3293b7c44211b709b49ea935560f51922"} Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.962520 4690 scope.go:117] "RemoveContainer" containerID="0c931b3ea66caaa0c191519844ad432f09f6eef14007624ede266966732d2ba0" Mar 20 13:26:07 crc kubenswrapper[4690]: I0320 13:26:07.974627 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:07 crc kubenswrapper[4690]: E0320 13:26:07.975026 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.475014208 +0000 UTC m=+214.764614151 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.010333 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp"] Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.014572 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-p4fhp"] Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.018252 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-b8fwc"] Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.028262 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg"] Mar 20 13:26:08 crc kubenswrapper[4690]: W0320 13:26:08.036963 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode38d71d2_09b1_4ff8_b9df_91da1e2b97cb.slice/crio-53363d59764e451a993d125df6416f58ac6edd1d58a58e665e1805181c841f6d WatchSource:0}: Error finding container 53363d59764e451a993d125df6416f58ac6edd1d58a58e665e1805181c841f6d: Status 404 returned error can't find the container with id 53363d59764e451a993d125df6416f58ac6edd1d58a58e665e1805181c841f6d Mar 20 13:26:08 crc kubenswrapper[4690]: W0320 13:26:08.049158 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod80d2d6ba_80aa_421e_b3bb_64ebd2b09371.slice/crio-c7abaa2c9737efd2e60d9319cfc400b903d309e8b77c67c64ead51b7644a7f0d WatchSource:0}: Error 
finding container c7abaa2c9737efd2e60d9319cfc400b903d309e8b77c67c64ead51b7644a7f0d: Status 404 returned error can't find the container with id c7abaa2c9737efd2e60d9319cfc400b903d309e8b77c67c64ead51b7644a7f0d Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.075409 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.075550 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.575527292 +0000 UTC m=+214.865127235 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.075877 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.078554 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.578538236 +0000 UTC m=+214.868138179 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.177567 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.178163 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.678134409 +0000 UTC m=+214.967734352 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.221440 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-snbxt"] Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.222506 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.226275 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.232463 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-snbxt"] Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.280433 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.280517 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-catalog-content\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.280614 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w78vg\" (UniqueName: \"kubernetes.io/projected/c91a8b76-7263-4b29-ac22-b1459fe1f35b-kube-api-access-w78vg\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.280673 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-utilities\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.281070 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.781057386 +0000 UTC m=+215.070657329 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.359215 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.359272 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.360860 4690 patch_prober.go:28] interesting pod/console-f9d7485db-h2jxx container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.360958 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-h2jxx" podUID="74952b15-473b-462f-a05f-6c00433ed4d5" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.381769 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.381979 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.881952364 +0000 UTC m=+215.171552307 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.382283 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w78vg\" (UniqueName: \"kubernetes.io/projected/c91a8b76-7263-4b29-ac22-b1459fe1f35b-kube-api-access-w78vg\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.382353 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-utilities\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.382393 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.382438 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-catalog-content\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.382963 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-catalog-content\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.383118 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-utilities\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.383271 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.883256619 +0000 UTC m=+215.172856652 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.403216 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w78vg\" (UniqueName: \"kubernetes.io/projected/c91a8b76-7263-4b29-ac22-b1459fe1f35b-kube-api-access-w78vg\") pod \"redhat-marketplace-snbxt\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.423069 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="345c7db2-4067-402c-bddf-3a497a9540c2" path="/var/lib/kubelet/pods/345c7db2-4067-402c-bddf-3a497a9540c2/volumes" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.483175 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.484256 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:08.98424043 +0000 UTC m=+215.273840373 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.563723 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.568034 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:08 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:08 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:08 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.568079 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.584649 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.585063 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:09.085048424 +0000 UTC m=+215.374648367 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.624492 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-k9f68"] Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.625474 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.639395 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9f68"] Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.688522 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.688789 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-03-20 13:26:09.188734748 +0000 UTC m=+215.478334691 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.689367 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.689467 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flznw\" (UniqueName: \"kubernetes.io/projected/b33d4d78-4e84-48e1-9b17-8427e0bd042e-kube-api-access-flznw\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.689497 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-catalog-content\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.689564 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-utilities\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: E0320 13:26:08.690024 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-03-20 13:26:09.190007842 +0000 UTC m=+215.479607785 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-pkvkz" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.690440 4690 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-03-20T13:26:07.942770554Z","Handler":null,"Name":""} Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.694913 4690 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.694953 4690 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.791232 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.791481 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-utilities\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.791586 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flznw\" (UniqueName: \"kubernetes.io/projected/b33d4d78-4e84-48e1-9b17-8427e0bd042e-kube-api-access-flznw\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.791624 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-catalog-content\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.792420 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-catalog-content\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.792454 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-utilities\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " 
pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.797164 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.832364 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flznw\" (UniqueName: \"kubernetes.io/projected/b33d4d78-4e84-48e1-9b17-8427e0bd042e-kube-api-access-flznw\") pod \"redhat-marketplace-k9f68\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.863439 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-mn9nn" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.876592 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-snbxt"] Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.892457 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.898441 4690 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.898486 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.939443 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-pkvkz\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.942013 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.979369 4690 generic.go:334] "Generic (PLEG): container finished" podID="62e83612-6289-48a8-a3bb-4488048279f7" containerID="ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b" exitCode=0 Mar 20 13:26:08 crc kubenswrapper[4690]: I0320 13:26:08.979740 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2485h" event={"ID":"62e83612-6289-48a8-a3bb-4488048279f7","Type":"ContainerDied","Data":"ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.008018 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" event={"ID":"80d2d6ba-80aa-421e-b3bb-64ebd2b09371","Type":"ContainerStarted","Data":"f803376e8c51a696cb6980b985aef7dac2d0d513700612f5b5b45d95845f8f2c"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.008053 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" event={"ID":"80d2d6ba-80aa-421e-b3bb-64ebd2b09371","Type":"ContainerStarted","Data":"c7abaa2c9737efd2e60d9319cfc400b903d309e8b77c67c64ead51b7644a7f0d"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.008403 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.009902 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-snbxt" event={"ID":"c91a8b76-7263-4b29-ac22-b1459fe1f35b","Type":"ContainerStarted","Data":"b5eb9c24353c805594f74d0a945c5b452cbc25415ee55e3e6eb68d9709a52694"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.018801 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.020814 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-69ksw" event={"ID":"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7","Type":"ContainerStarted","Data":"bc3f8211e171bc3fa38f0146892e443e697acc491d8a969e9a6552d6b26c56fd"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.020892 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-69ksw" event={"ID":"76cc7bbf-d280-42bf-ae62-b34fc6a3d6b7","Type":"ContainerStarted","Data":"2da1d15734c2b32733168a8002e2fc0363f466f4001c2b7b8f8bdefff8172418"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.025660 4690 generic.go:334] "Generic (PLEG): container finished" podID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerID="77519c387aa0e2acd33b6c0772854e05e13fbd414c8f44de6b75ec3564abb188" exitCode=0 Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.025729 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvxj" event={"ID":"b8184a4a-79e5-491e-8e56-ebf0bea4601f","Type":"ContainerDied","Data":"77519c387aa0e2acd33b6c0772854e05e13fbd414c8f44de6b75ec3564abb188"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.035220 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" podStartSLOduration=6.035201564 podStartE2EDuration="6.035201564s" podCreationTimestamp="2026-03-20 13:26:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:09.031806117 +0000 UTC m=+215.321406070" watchObservedRunningTime="2026-03-20 13:26:09.035201564 +0000 UTC m=+215.324801507" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.048335 4690 generic.go:334] "Generic (PLEG): container finished" podID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerID="f087f74a8a7f1068b6844b2f1f2ea22618c2efff5adfc068a3d7258ef4848e54" exitCode=0 Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.048800 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b8fwc" event={"ID":"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb","Type":"ContainerDied","Data":"f087f74a8a7f1068b6844b2f1f2ea22618c2efff5adfc068a3d7258ef4848e54"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.048835 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b8fwc" event={"ID":"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb","Type":"ContainerStarted","Data":"53363d59764e451a993d125df6416f58ac6edd1d58a58e665e1805181c841f6d"} Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.057669 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.079773 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.080383 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.082231 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.083602 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.119022 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.121127 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-69ksw" podStartSLOduration=12.121107514 podStartE2EDuration="12.121107514s" podCreationTimestamp="2026-03-20 13:25:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:09.111971798 +0000 UTC m=+215.401571741" watchObservedRunningTime="2026-03-20 13:26:09.121107514 +0000 UTC m=+215.410707457" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.216241 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5196eff0-8a75-4c63-b081-aea31e9d1b99-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5196eff0-8a75-4c63-b081-aea31e9d1b99\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.216363 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5196eff0-8a75-4c63-b081-aea31e9d1b99-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5196eff0-8a75-4c63-b081-aea31e9d1b99\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.317215 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5196eff0-8a75-4c63-b081-aea31e9d1b99-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5196eff0-8a75-4c63-b081-aea31e9d1b99\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.317313 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5196eff0-8a75-4c63-b081-aea31e9d1b99-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5196eff0-8a75-4c63-b081-aea31e9d1b99\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.317519 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5196eff0-8a75-4c63-b081-aea31e9d1b99-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"5196eff0-8a75-4c63-b081-aea31e9d1b99\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.336711 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.337016 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5196eff0-8a75-4c63-b081-aea31e9d1b99-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"5196eff0-8a75-4c63-b081-aea31e9d1b99\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.408016 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.418092 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53349f20-095b-4c88-b827-f3d6d09c15fc-config-volume\") pod \"53349f20-095b-4c88-b827-f3d6d09c15fc\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.418149 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xppjl\" (UniqueName: \"kubernetes.io/projected/53349f20-095b-4c88-b827-f3d6d09c15fc-kube-api-access-xppjl\") pod \"53349f20-095b-4c88-b827-f3d6d09c15fc\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.418247 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53349f20-095b-4c88-b827-f3d6d09c15fc-secret-volume\") pod \"53349f20-095b-4c88-b827-f3d6d09c15fc\" (UID: \"53349f20-095b-4c88-b827-f3d6d09c15fc\") " Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.419406 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/53349f20-095b-4c88-b827-f3d6d09c15fc-config-volume" (OuterVolumeSpecName: "config-volume") pod "53349f20-095b-4c88-b827-f3d6d09c15fc" (UID: "53349f20-095b-4c88-b827-f3d6d09c15fc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.421672 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zwzf4"] Mar 20 13:26:09 crc kubenswrapper[4690]: E0320 13:26:09.421887 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53349f20-095b-4c88-b827-f3d6d09c15fc" containerName="collect-profiles" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.421899 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="53349f20-095b-4c88-b827-f3d6d09c15fc" containerName="collect-profiles" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.422010 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="53349f20-095b-4c88-b827-f3d6d09c15fc" containerName="collect-profiles" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.422434 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53349f20-095b-4c88-b827-f3d6d09c15fc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "53349f20-095b-4c88-b827-f3d6d09c15fc" (UID: "53349f20-095b-4c88-b827-f3d6d09c15fc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.422703 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.424406 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53349f20-095b-4c88-b827-f3d6d09c15fc-kube-api-access-xppjl" (OuterVolumeSpecName: "kube-api-access-xppjl") pod "53349f20-095b-4c88-b827-f3d6d09c15fc" (UID: "53349f20-095b-4c88-b827-f3d6d09c15fc"). InnerVolumeSpecName "kube-api-access-xppjl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.424731 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.431311 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zwzf4"] Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.519494 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5z69\" (UniqueName: \"kubernetes.io/projected/3978f4ce-bf05-41c1-b941-c5927fec1785-kube-api-access-x5z69\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.519536 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-catalog-content\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.519555 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-utilities\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.519650 4690 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/53349f20-095b-4c88-b827-f3d6d09c15fc-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.519672 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xppjl\" (UniqueName: \"kubernetes.io/projected/53349f20-095b-4c88-b827-f3d6d09c15fc-kube-api-access-xppjl\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.519685 4690 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/53349f20-095b-4c88-b827-f3d6d09c15fc-secret-volume\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.527155 4690 patch_prober.go:28] interesting pod/downloads-7954f5f757-7vkfd container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" start-of-body= Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.527181 4690 patch_prober.go:28] interesting pod/downloads-7954f5f757-7vkfd container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: 
connect: connection refused" start-of-body= Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.527205 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-7vkfd" podUID="4874f99f-2938-475f-872a-c7a794ae4818" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.527209 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-7vkfd" podUID="4874f99f-2938-475f-872a-c7a794ae4818" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.11:8080/\": dial tcp 10.217.0.11:8080: connect: connection refused" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.568471 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:09 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:09 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:09 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.568536 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.592492 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9f68"] Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.620002 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkvkz"] Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.620754 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5z69\" (UniqueName: \"kubernetes.io/projected/3978f4ce-bf05-41c1-b941-c5927fec1785-kube-api-access-x5z69\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.620801 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-catalog-content\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.620825 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-utilities\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.621502 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-catalog-content\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc 
kubenswrapper[4690]: I0320 13:26:09.622401 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-utilities\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.645082 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5z69\" (UniqueName: \"kubernetes.io/projected/3978f4ce-bf05-41c1-b941-c5927fec1785-kube-api-access-x5z69\") pod \"redhat-operators-zwzf4\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: W0320 13:26:09.648721 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod704eed42_1e9b_4d8c_be9f_4d237658ae86.slice/crio-d23cb3c7f2664c0101ee3f9cd6d7f0aae4873e20be5117566c305e99812d4f53 WatchSource:0}: Error finding container d23cb3c7f2664c0101ee3f9cd6d7f0aae4873e20be5117566c305e99812d4f53: Status 404 returned error can't find the container with id d23cb3c7f2664c0101ee3f9cd6d7f0aae4873e20be5117566c305e99812d4f53 Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.681958 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.682273 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.688789 4690 patch_prober.go:28] interesting pod/apiserver-76f77b778f-6c8pc container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]log ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]etcd ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/start-apiserver-admission-initializer ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/generic-apiserver-start-informers ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/max-in-flight-filter ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/storage-object-count-tracker-hook ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/image.openshift.io-apiserver-caches ok Mar 20 13:26:09 crc kubenswrapper[4690]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Mar 20 13:26:09 crc kubenswrapper[4690]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/project.openshift.io-projectcache ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/openshift.io-startinformers ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/openshift.io-restmapperupdater ok Mar 20 13:26:09 crc kubenswrapper[4690]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Mar 20 13:26:09 crc kubenswrapper[4690]: livez check failed Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.688838 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" podUID="aae7d681-bfbe-4280-92b9-f117157b6be8" 
containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.691213 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Mar 20 13:26:09 crc kubenswrapper[4690]: W0320 13:26:09.708023 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod5196eff0_8a75_4c63_b081_aea31e9d1b99.slice/crio-17ba85a6827a3e1d3f392d8028dd36a1a18c1cc1b3f2def75501543d794cf9f2 WatchSource:0}: Error finding container 17ba85a6827a3e1d3f392d8028dd36a1a18c1cc1b3f2def75501543d794cf9f2: Status 404 returned error can't find the container with id 17ba85a6827a3e1d3f392d8028dd36a1a18c1cc1b3f2def75501543d794cf9f2 Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.747430 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.821054 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gmjkl"] Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.824058 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.842608 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gmjkl"] Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.926588 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtpb5\" (UniqueName: \"kubernetes.io/projected/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-kube-api-access-wtpb5\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.926998 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-utilities\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:09 crc kubenswrapper[4690]: I0320 13:26:09.927063 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-catalog-content\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.027735 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtpb5\" (UniqueName: \"kubernetes.io/projected/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-kube-api-access-wtpb5\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.027919 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-utilities\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:10 crc 
kubenswrapper[4690]: I0320 13:26:10.027967 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-catalog-content\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.028353 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-catalog-content\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.028586 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-utilities\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.044859 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zwzf4"] Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.051589 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtpb5\" (UniqueName: \"kubernetes.io/projected/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-kube-api-access-wtpb5\") pod \"redhat-operators-gmjkl\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.063020 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.071628 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" event={"ID":"704eed42-1e9b-4d8c-be9f-4d237658ae86","Type":"ContainerStarted","Data":"acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e"} Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.071682 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" event={"ID":"704eed42-1e9b-4d8c-be9f-4d237658ae86","Type":"ContainerStarted","Data":"d23cb3c7f2664c0101ee3f9cd6d7f0aae4873e20be5117566c305e99812d4f53"} Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.071862 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.073172 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwzf4" event={"ID":"3978f4ce-bf05-41c1-b941-c5927fec1785","Type":"ContainerStarted","Data":"dbd01863dc5009d4a6e9c463300b352f3980ac292a1676b9e153d5e3b44010ae"} Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.076976 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.076993 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566875-g76qr" event={"ID":"53349f20-095b-4c88-b827-f3d6d09c15fc","Type":"ContainerDied","Data":"fa0741a966f1cb6e45ad1cc2a7b1fc4b4a336883e1ed4d97ac58eb9724300aa5"} Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.077046 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa0741a966f1cb6e45ad1cc2a7b1fc4b4a336883e1ed4d97ac58eb9724300aa5" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.079360 4690 generic.go:334] "Generic (PLEG): container finished" podID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerID="cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6" exitCode=0 Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.079417 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-snbxt" event={"ID":"c91a8b76-7263-4b29-ac22-b1459fe1f35b","Type":"ContainerDied","Data":"cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6"} Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.085092 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5196eff0-8a75-4c63-b081-aea31e9d1b99","Type":"ContainerStarted","Data":"17ba85a6827a3e1d3f392d8028dd36a1a18c1cc1b3f2def75501543d794cf9f2"} Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.093778 4690 generic.go:334] "Generic (PLEG): container finished" podID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerID="1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9" exitCode=0 Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.094625 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9f68" event={"ID":"b33d4d78-4e84-48e1-9b17-8427e0bd042e","Type":"ContainerDied","Data":"1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9"} Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.094691 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9f68" event={"ID":"b33d4d78-4e84-48e1-9b17-8427e0bd042e","Type":"ContainerStarted","Data":"fe147e96ea4a9be5f5b36cdaed816d6eba5d076a3ae594c2f9d086c839eb8d60"} Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.118638 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" podStartSLOduration=168.118621214 podStartE2EDuration="2m48.118621214s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:10.11791049 +0000 UTC m=+216.407510433" watchObservedRunningTime="2026-03-20 13:26:10.118621214 +0000 UTC m=+216.408221157" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.213129 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.358763 4690 ???:1] "http: TLS handshake error from 192.168.126.11:42220: no serving certificate available for the kubelet" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.426013 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.437663 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.438722 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.441476 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.447743 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.447920 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.537103 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.538022 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.561950 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.565180 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:10 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:10 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:10 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.565247 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.639253 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: 
\"8179ea9f-06c5-495c-a546-7fd9f5404b3c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.639289 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.640317 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.669537 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.750447 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gmjkl"] Mar 20 13:26:10 crc kubenswrapper[4690]: I0320 13:26:10.766497 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.037203 4690 ???:1] "http: TLS handshake error from 192.168.126.11:54124: no serving certificate available for the kubelet" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.126275 4690 generic.go:334] "Generic (PLEG): container finished" podID="5196eff0-8a75-4c63-b081-aea31e9d1b99" containerID="64ecf70c8c22f20a3bd4168268b3e85bf9b70292fb06db9fba9e69f0a9f6937f" exitCode=0 Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.126337 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5196eff0-8a75-4c63-b081-aea31e9d1b99","Type":"ContainerDied","Data":"64ecf70c8c22f20a3bd4168268b3e85bf9b70292fb06db9fba9e69f0a9f6937f"} Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.131892 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmjkl" event={"ID":"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a","Type":"ContainerStarted","Data":"f82a19ad6323fc0ff28828b3dbfbc17bf6d059311fb5f4d6cce9311c585e694d"} Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.157236 4690 generic.go:334] "Generic (PLEG): container finished" podID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerID="437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d" exitCode=0 Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.157310 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwzf4" event={"ID":"3978f4ce-bf05-41c1-b941-c5927fec1785","Type":"ContainerDied","Data":"437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d"} Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.294394 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.353587 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.353690 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.353753 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.353781 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.354881 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.359524 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.360907 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.362394 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.456126 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.462970 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d459decc-f715-4636-bc35-963ae8133ec7-metrics-certs\") pod \"network-metrics-daemon-rpcmp\" (UID: \"d459decc-f715-4636-bc35-963ae8133ec7\") " pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.566423 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:11 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:11 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:11 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.566474 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.629882 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.639221 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.646271 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:26:11 crc kubenswrapper[4690]: I0320 13:26:11.652473 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-rpcmp" Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.185553 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8179ea9f-06c5-495c-a546-7fd9f5404b3c","Type":"ContainerStarted","Data":"dc61074ab438b861cd6089d0befe4885cf2254ce7898cce3c102ad01dcca4541"} Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.206655 4690 generic.go:334] "Generic (PLEG): container finished" podID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerID="79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b" exitCode=0 Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.207369 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmjkl" event={"ID":"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a","Type":"ContainerDied","Data":"79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b"} Mar 20 13:26:12 crc kubenswrapper[4690]: W0320 13:26:12.443610 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b6479f0_333b_4a96_9adf_2099afdc2447.slice/crio-81e89a89054eb4d27abd543314e369e4f61127c70b4b6863d1fd32e456d29143 WatchSource:0}: Error finding container 81e89a89054eb4d27abd543314e369e4f61127c70b4b6863d1fd32e456d29143: Status 404 returned error can't find the container with id 81e89a89054eb4d27abd543314e369e4f61127c70b4b6863d1fd32e456d29143 Mar 20 13:26:12 crc kubenswrapper[4690]: W0320 13:26:12.478379 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-b089f97e6e017a138b67e1a956d427d7178c270602dfbeb5cfba5cf67bdada96 WatchSource:0}: Error finding container b089f97e6e017a138b67e1a956d427d7178c270602dfbeb5cfba5cf67bdada96: Status 404 returned error can't find the container with id b089f97e6e017a138b67e1a956d427d7178c270602dfbeb5cfba5cf67bdada96 Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.488698 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-rpcmp"] Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.489558 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:12 crc kubenswrapper[4690]: W0320 13:26:12.503637 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd459decc_f715_4636_bc35_963ae8133ec7.slice/crio-72cfdfe8ef5cb97691418da3edfdbb5517912933a8684a605ddc261b333cdfb1 WatchSource:0}: Error finding container 72cfdfe8ef5cb97691418da3edfdbb5517912933a8684a605ddc261b333cdfb1: Status 404 returned error can't find the container with id 72cfdfe8ef5cb97691418da3edfdbb5517912933a8684a605ddc261b333cdfb1 Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.564758 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:12 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:12 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:12 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.564808 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.573713 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5196eff0-8a75-4c63-b081-aea31e9d1b99-kube-api-access\") pod \"5196eff0-8a75-4c63-b081-aea31e9d1b99\" (UID: \"5196eff0-8a75-4c63-b081-aea31e9d1b99\") " Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.573749 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5196eff0-8a75-4c63-b081-aea31e9d1b99-kubelet-dir\") pod \"5196eff0-8a75-4c63-b081-aea31e9d1b99\" (UID: \"5196eff0-8a75-4c63-b081-aea31e9d1b99\") " Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.574154 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5196eff0-8a75-4c63-b081-aea31e9d1b99-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5196eff0-8a75-4c63-b081-aea31e9d1b99" (UID: "5196eff0-8a75-4c63-b081-aea31e9d1b99"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.592144 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5196eff0-8a75-4c63-b081-aea31e9d1b99-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5196eff0-8a75-4c63-b081-aea31e9d1b99" (UID: "5196eff0-8a75-4c63-b081-aea31e9d1b99"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.675992 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5196eff0-8a75-4c63-b081-aea31e9d1b99-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:12 crc kubenswrapper[4690]: I0320 13:26:12.676041 4690 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5196eff0-8a75-4c63-b081-aea31e9d1b99-kubelet-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.218261 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"5196eff0-8a75-4c63-b081-aea31e9d1b99","Type":"ContainerDied","Data":"17ba85a6827a3e1d3f392d8028dd36a1a18c1cc1b3f2def75501543d794cf9f2"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.218903 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="17ba85a6827a3e1d3f392d8028dd36a1a18c1cc1b3f2def75501543d794cf9f2" Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.218324 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.221002 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"8e6019d91f4037c996c7bcc06ed3e84e078fe237f259ec57ae43b64a9a052f1c"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.221058 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"3ba1969ea1a8b7fe2615b174c2ab6252c1017ddff3453ac5613aa9b94de617c7"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.223645 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" event={"ID":"d459decc-f715-4636-bc35-963ae8133ec7","Type":"ContainerStarted","Data":"d66f7af5bb305b10c1ae1674f6e041637fd0920e09b26a8ff2028863b01fa33a"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.223673 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" event={"ID":"d459decc-f715-4636-bc35-963ae8133ec7","Type":"ContainerStarted","Data":"72cfdfe8ef5cb97691418da3edfdbb5517912933a8684a605ddc261b333cdfb1"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.227509 4690 generic.go:334] "Generic (PLEG): container finished" podID="8179ea9f-06c5-495c-a546-7fd9f5404b3c" containerID="d3b4d97a2fba65c5a397d26d4b5ba0a5ebef4e08a9f6865b44f609ef39de27fa" exitCode=0 Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.227638 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8179ea9f-06c5-495c-a546-7fd9f5404b3c","Type":"ContainerDied","Data":"d3b4d97a2fba65c5a397d26d4b5ba0a5ebef4e08a9f6865b44f609ef39de27fa"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.234072 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"73172fc3277e02c0c13b793a68aaf0613be9bea93f01ca97c35b4c3f2d8e19cb"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.234116 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"81e89a89054eb4d27abd543314e369e4f61127c70b4b6863d1fd32e456d29143"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.234318 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.244881 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"9eaf09d8abb7506225c3ce7ca751f16ecaf6186abb686fe0b0ac1b3181dab019"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.244924 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"b089f97e6e017a138b67e1a956d427d7178c270602dfbeb5cfba5cf67bdada96"} Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.564788 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:13 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:13 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:13 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:13 crc kubenswrapper[4690]: I0320 13:26:13.565159 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:14 crc kubenswrapper[4690]: I0320 13:26:14.260364 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-rpcmp" event={"ID":"d459decc-f715-4636-bc35-963ae8133ec7","Type":"ContainerStarted","Data":"fadc1ced290a0f0a8bbeaa58107e8638fd4a1bb4e59f005b039804d94f6e843f"} Mar 20 13:26:14 crc kubenswrapper[4690]: I0320 13:26:14.275736 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-rpcmp" podStartSLOduration=172.27571406 podStartE2EDuration="2m52.27571406s" podCreationTimestamp="2026-03-20 13:23:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:14.273055778 +0000 UTC m=+220.562655721" watchObservedRunningTime="2026-03-20 13:26:14.27571406 +0000 UTC m=+220.565314003" Mar 20 13:26:14 crc kubenswrapper[4690]: I0320 13:26:14.565054 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:14 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:14 crc kubenswrapper[4690]: [+]process-running ok 
Mar 20 13:26:14 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:14 crc kubenswrapper[4690]: I0320 13:26:14.565385 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:14 crc kubenswrapper[4690]: I0320 13:26:14.686601 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:26:14 crc kubenswrapper[4690]: I0320 13:26:14.695701 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-6c8pc" Mar 20 13:26:15 crc kubenswrapper[4690]: I0320 13:26:15.570833 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:15 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:15 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:15 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:15 crc kubenswrapper[4690]: I0320 13:26:15.570921 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:15 crc kubenswrapper[4690]: I0320 13:26:15.710438 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-tqft4" Mar 20 13:26:16 crc kubenswrapper[4690]: I0320 13:26:16.565216 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:16 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:16 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:16 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:16 crc kubenswrapper[4690]: I0320 13:26:16.565466 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:17 crc kubenswrapper[4690]: I0320 13:26:17.563886 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:17 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:17 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:17 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:17 crc kubenswrapper[4690]: I0320 13:26:17.563981 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:18 crc kubenswrapper[4690]: I0320 13:26:18.359336 4690 patch_prober.go:28] interesting pod/console-f9d7485db-h2jxx 
container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Mar 20 13:26:18 crc kubenswrapper[4690]: I0320 13:26:18.359388 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-h2jxx" podUID="74952b15-473b-462f-a05f-6c00433ed4d5" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Mar 20 13:26:18 crc kubenswrapper[4690]: I0320 13:26:18.565815 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:18 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:18 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:18 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:18 crc kubenswrapper[4690]: I0320 13:26:18.565891 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:19 crc kubenswrapper[4690]: I0320 13:26:19.536909 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-7vkfd" Mar 20 13:26:19 crc kubenswrapper[4690]: I0320 13:26:19.564065 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:19 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:19 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:19 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:19 crc kubenswrapper[4690]: I0320 13:26:19.564130 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:20 crc kubenswrapper[4690]: I0320 13:26:20.564835 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:20 crc kubenswrapper[4690]: [-]has-synced failed: reason withheld Mar 20 13:26:20 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:20 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:20 crc kubenswrapper[4690]: I0320 13:26:20.565216 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:20 crc kubenswrapper[4690]: I0320 13:26:20.623690 4690 ???:1] "http: TLS handshake error from 192.168.126.11:42540: no serving certificate available for the kubelet" Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.325646 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.329377 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"8179ea9f-06c5-495c-a546-7fd9f5404b3c","Type":"ContainerDied","Data":"dc61074ab438b861cd6089d0befe4885cf2254ce7898cce3c102ad01dcca4541"} Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.329436 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dc61074ab438b861cd6089d0befe4885cf2254ce7898cce3c102ad01dcca4541" Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.329458 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.472637 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kube-api-access\") pod \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\" (UID: \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\") " Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.472787 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kubelet-dir\") pod \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\" (UID: \"8179ea9f-06c5-495c-a546-7fd9f5404b3c\") " Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.473091 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8179ea9f-06c5-495c-a546-7fd9f5404b3c" (UID: "8179ea9f-06c5-495c-a546-7fd9f5404b3c"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.479440 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8179ea9f-06c5-495c-a546-7fd9f5404b3c" (UID: "8179ea9f-06c5-495c-a546-7fd9f5404b3c"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.565128 4690 patch_prober.go:28] interesting pod/router-default-5444994796-kl5sr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Mar 20 13:26:21 crc kubenswrapper[4690]: [+]has-synced ok Mar 20 13:26:21 crc kubenswrapper[4690]: [+]process-running ok Mar 20 13:26:21 crc kubenswrapper[4690]: healthz check failed Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.565200 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-kl5sr" podUID="23ed3e8b-fcc1-446e-bb52-863602c42c6d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.574116 4690 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kubelet-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:21 crc kubenswrapper[4690]: I0320 13:26:21.574187 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8179ea9f-06c5-495c-a546-7fd9f5404b3c-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:22 crc kubenswrapper[4690]: I0320 13:26:22.565591 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:22 crc kubenswrapper[4690]: I0320 13:26:22.568614 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-kl5sr" Mar 20 13:26:23 crc kubenswrapper[4690]: I0320 13:26:23.247848 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7d798ff474-dkkqx"] Mar 20 13:26:23 crc kubenswrapper[4690]: I0320 13:26:23.248414 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" podUID="238466ff-44c9-4e64-9aa3-2f9d2cae17cf" containerName="controller-manager" containerID="cri-o://14b5ed630f3f8548e5abd160a7df429f0b1d4f619f36081b0bcab8752c88d415" gracePeriod=30 Mar 20 13:26:23 crc kubenswrapper[4690]: I0320 13:26:23.257630 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg"] Mar 20 13:26:23 crc kubenswrapper[4690]: I0320 13:26:23.257929 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" podUID="80d2d6ba-80aa-421e-b3bb-64ebd2b09371" containerName="route-controller-manager" containerID="cri-o://f803376e8c51a696cb6980b985aef7dac2d0d513700612f5b5b45d95845f8f2c" gracePeriod=30 Mar 20 13:26:24 crc kubenswrapper[4690]: I0320 13:26:24.366150 4690 generic.go:334] "Generic (PLEG): container finished" podID="80d2d6ba-80aa-421e-b3bb-64ebd2b09371" containerID="f803376e8c51a696cb6980b985aef7dac2d0d513700612f5b5b45d95845f8f2c" exitCode=0 Mar 20 13:26:24 crc kubenswrapper[4690]: I0320 13:26:24.366216 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" 
event={"ID":"80d2d6ba-80aa-421e-b3bb-64ebd2b09371","Type":"ContainerDied","Data":"f803376e8c51a696cb6980b985aef7dac2d0d513700612f5b5b45d95845f8f2c"} Mar 20 13:26:24 crc kubenswrapper[4690]: I0320 13:26:24.367470 4690 generic.go:334] "Generic (PLEG): container finished" podID="238466ff-44c9-4e64-9aa3-2f9d2cae17cf" containerID="14b5ed630f3f8548e5abd160a7df429f0b1d4f619f36081b0bcab8752c88d415" exitCode=0 Mar 20 13:26:24 crc kubenswrapper[4690]: I0320 13:26:24.367501 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" event={"ID":"238466ff-44c9-4e64-9aa3-2f9d2cae17cf","Type":"ContainerDied","Data":"14b5ed630f3f8548e5abd160a7df429f0b1d4f619f36081b0bcab8752c88d415"} Mar 20 13:26:25 crc kubenswrapper[4690]: I0320 13:26:25.645138 4690 patch_prober.go:28] interesting pod/controller-manager-7d798ff474-dkkqx container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.45:8443/healthz\": dial tcp 10.217.0.45:8443: connect: connection refused" start-of-body= Mar 20 13:26:25 crc kubenswrapper[4690]: I0320 13:26:25.645203 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" podUID="238466ff-44c9-4e64-9aa3-2f9d2cae17cf" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.45:8443/healthz\": dial tcp 10.217.0.45:8443: connect: connection refused" Mar 20 13:26:27 crc kubenswrapper[4690]: I0320 13:26:27.646758 4690 patch_prober.go:28] interesting pod/route-controller-manager-588bd79dbd-fgspg container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.50:8443/healthz\": dial tcp 10.217.0.50:8443: connect: connection refused" start-of-body= Mar 20 13:26:27 crc kubenswrapper[4690]: I0320 13:26:27.646831 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" podUID="80d2d6ba-80aa-421e-b3bb-64ebd2b09371" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.50:8443/healthz\": dial tcp 10.217.0.50:8443: connect: connection refused" Mar 20 13:26:28 crc kubenswrapper[4690]: I0320 13:26:28.373532 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:26:28 crc kubenswrapper[4690]: I0320 13:26:28.380599 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:26:29 crc kubenswrapper[4690]: I0320 13:26:29.064219 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:26:33 crc kubenswrapper[4690]: I0320 13:26:33.829326 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:26:33 crc kubenswrapper[4690]: I0320 13:26:33.829723 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:26:33 crc kubenswrapper[4690]: I0320 13:26:33.971740 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:33 crc kubenswrapper[4690]: I0320 13:26:33.977191 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000475 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj"] Mar 20 13:26:34 crc kubenswrapper[4690]: E0320 13:26:34.000685 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80d2d6ba-80aa-421e-b3bb-64ebd2b09371" containerName="route-controller-manager" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000697 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="80d2d6ba-80aa-421e-b3bb-64ebd2b09371" containerName="route-controller-manager" Mar 20 13:26:34 crc kubenswrapper[4690]: E0320 13:26:34.000712 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="238466ff-44c9-4e64-9aa3-2f9d2cae17cf" containerName="controller-manager" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000717 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="238466ff-44c9-4e64-9aa3-2f9d2cae17cf" containerName="controller-manager" Mar 20 13:26:34 crc kubenswrapper[4690]: E0320 13:26:34.000735 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8179ea9f-06c5-495c-a546-7fd9f5404b3c" containerName="pruner" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000741 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8179ea9f-06c5-495c-a546-7fd9f5404b3c" containerName="pruner" Mar 20 13:26:34 crc kubenswrapper[4690]: E0320 13:26:34.000749 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5196eff0-8a75-4c63-b081-aea31e9d1b99" containerName="pruner" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000754 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5196eff0-8a75-4c63-b081-aea31e9d1b99" containerName="pruner" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000881 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="238466ff-44c9-4e64-9aa3-2f9d2cae17cf" containerName="controller-manager" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000892 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8179ea9f-06c5-495c-a546-7fd9f5404b3c" containerName="pruner" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000901 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="80d2d6ba-80aa-421e-b3bb-64ebd2b09371" containerName="route-controller-manager" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.000909 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5196eff0-8a75-4c63-b081-aea31e9d1b99" containerName="pruner" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.001270 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.017254 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj"] Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.100353 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-config\") pod \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.100672 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8sm7\" (UniqueName: \"kubernetes.io/projected/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-kube-api-access-l8sm7\") pod \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.101760 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-client-ca\") pod \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.101822 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-serving-cert\") pod \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.101488 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-config" (OuterVolumeSpecName: "config") pod "80d2d6ba-80aa-421e-b3bb-64ebd2b09371" (UID: "80d2d6ba-80aa-421e-b3bb-64ebd2b09371"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.101907 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-serving-cert\") pod \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\" (UID: \"80d2d6ba-80aa-421e-b3bb-64ebd2b09371\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.101965 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-proxy-ca-bundles\") pod \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.101997 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-config\") pod \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102033 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-client-ca\") pod \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102068 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8blgj\" (UniqueName: \"kubernetes.io/projected/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-kube-api-access-8blgj\") pod \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\" (UID: \"238466ff-44c9-4e64-9aa3-2f9d2cae17cf\") " Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102277 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-client-ca" (OuterVolumeSpecName: "client-ca") pod "80d2d6ba-80aa-421e-b3bb-64ebd2b09371" (UID: "80d2d6ba-80aa-421e-b3bb-64ebd2b09371"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102451 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjdp8\" (UniqueName: \"kubernetes.io/projected/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-kube-api-access-kjdp8\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102610 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-client-ca\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102705 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-config\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102743 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-serving-cert\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102787 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102800 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.102841 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-config" (OuterVolumeSpecName: "config") pod "238466ff-44c9-4e64-9aa3-2f9d2cae17cf" (UID: "238466ff-44c9-4e64-9aa3-2f9d2cae17cf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.103026 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "238466ff-44c9-4e64-9aa3-2f9d2cae17cf" (UID: "238466ff-44c9-4e64-9aa3-2f9d2cae17cf"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.103282 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-client-ca" (OuterVolumeSpecName: "client-ca") pod "238466ff-44c9-4e64-9aa3-2f9d2cae17cf" (UID: "238466ff-44c9-4e64-9aa3-2f9d2cae17cf"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.106164 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-kube-api-access-l8sm7" (OuterVolumeSpecName: "kube-api-access-l8sm7") pod "80d2d6ba-80aa-421e-b3bb-64ebd2b09371" (UID: "80d2d6ba-80aa-421e-b3bb-64ebd2b09371"). InnerVolumeSpecName "kube-api-access-l8sm7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.106421 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "238466ff-44c9-4e64-9aa3-2f9d2cae17cf" (UID: "238466ff-44c9-4e64-9aa3-2f9d2cae17cf"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.114231 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-kube-api-access-8blgj" (OuterVolumeSpecName: "kube-api-access-8blgj") pod "238466ff-44c9-4e64-9aa3-2f9d2cae17cf" (UID: "238466ff-44c9-4e64-9aa3-2f9d2cae17cf"). InnerVolumeSpecName "kube-api-access-8blgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.115386 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "80d2d6ba-80aa-421e-b3bb-64ebd2b09371" (UID: "80d2d6ba-80aa-421e-b3bb-64ebd2b09371"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203534 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjdp8\" (UniqueName: \"kubernetes.io/projected/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-kube-api-access-kjdp8\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203608 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-client-ca\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203668 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-config\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203699 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-serving-cert\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203759 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8sm7\" (UniqueName: \"kubernetes.io/projected/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-kube-api-access-l8sm7\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203774 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203787 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/80d2d6ba-80aa-421e-b3bb-64ebd2b09371-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203798 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203809 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203821 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.203832 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8blgj\" (UniqueName: 
\"kubernetes.io/projected/238466ff-44c9-4e64-9aa3-2f9d2cae17cf-kube-api-access-8blgj\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.205403 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-config\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.206520 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-client-ca\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.212290 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-serving-cert\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.230650 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjdp8\" (UniqueName: \"kubernetes.io/projected/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-kube-api-access-kjdp8\") pod \"route-controller-manager-6cc8f5bbd6-k8whj\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.331826 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:34 crc kubenswrapper[4690]: E0320 13:26:34.406256 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/openshift4/ose-cli:latest" Mar 20 13:26:34 crc kubenswrapper[4690]: E0320 13:26:34.406620 4690 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 13:26:34 crc kubenswrapper[4690]: container &Container{Name:oc,Image:registry.redhat.io/openshift4/ose-cli:latest,Command:[/bin/bash -c oc get csr -o go-template='{{range .items}}{{if not .status}}{{.metadata.name}}{{"\n"}}{{end}}{{end}}' | xargs --no-run-if-empty oc adm certificate approve Mar 20 13:26:34 crc kubenswrapper[4690]: ],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t7bth,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod auto-csr-approver-29566886-cp8l8_openshift-infra(6db1d803-f871-41d2-b6a7-0b3456af1ddf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled Mar 20 13:26:34 crc kubenswrapper[4690]: > logger="UnhandledError" Mar 20 13:26:34 crc kubenswrapper[4690]: E0320 13:26:34.408568 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" podUID="6db1d803-f871-41d2-b6a7-0b3456af1ddf" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.456025 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" event={"ID":"238466ff-44c9-4e64-9aa3-2f9d2cae17cf","Type":"ContainerDied","Data":"75442c2764c2303a2a11fa85334f30e9a23920f82247e1a1c1c4f9676225c59f"} Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.456079 4690 scope.go:117] "RemoveContainer" containerID="14b5ed630f3f8548e5abd160a7df429f0b1d4f619f36081b0bcab8752c88d415" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.456175 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7d798ff474-dkkqx" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.460920 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.461143 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg" event={"ID":"80d2d6ba-80aa-421e-b3bb-64ebd2b09371","Type":"ContainerDied","Data":"c7abaa2c9737efd2e60d9319cfc400b903d309e8b77c67c64ead51b7644a7f0d"} Mar 20 13:26:34 crc kubenswrapper[4690]: E0320 13:26:34.463056 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"oc\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/openshift4/ose-cli:latest\\\"\"" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" podUID="6db1d803-f871-41d2-b6a7-0b3456af1ddf" Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.496796 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg"] Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.499621 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-588bd79dbd-fgspg"] Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.507537 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-7d798ff474-dkkqx"] Mar 20 13:26:34 crc kubenswrapper[4690]: I0320 13:26:34.510809 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-7d798ff474-dkkqx"] Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.310445 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6f9464b969-hvndh"] Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.314167 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.316183 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.316199 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.318037 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.318303 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.318307 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.318525 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.318596 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f9464b969-hvndh"] Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.324818 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.331972 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-config\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.332031 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-proxy-ca-bundles\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.332220 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-client-ca\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.332250 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7fsjs\" (UniqueName: \"kubernetes.io/projected/72ee95e1-e7ba-4911-bdd5-69f1303f3309-kube-api-access-7fsjs\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.332282 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/72ee95e1-e7ba-4911-bdd5-69f1303f3309-serving-cert\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.421595 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="238466ff-44c9-4e64-9aa3-2f9d2cae17cf" path="/var/lib/kubelet/pods/238466ff-44c9-4e64-9aa3-2f9d2cae17cf/volumes" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.422164 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80d2d6ba-80aa-421e-b3bb-64ebd2b09371" path="/var/lib/kubelet/pods/80d2d6ba-80aa-421e-b3bb-64ebd2b09371/volumes" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.433396 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-config\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.433455 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-proxy-ca-bundles\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.433531 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-client-ca\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.433556 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7fsjs\" (UniqueName: \"kubernetes.io/projected/72ee95e1-e7ba-4911-bdd5-69f1303f3309-kube-api-access-7fsjs\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.433583 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72ee95e1-e7ba-4911-bdd5-69f1303f3309-serving-cert\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.434972 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-config\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.435287 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-client-ca\") pod \"controller-manager-6f9464b969-hvndh\" (UID: 
\"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.436457 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-proxy-ca-bundles\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.445827 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72ee95e1-e7ba-4911-bdd5-69f1303f3309-serving-cert\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.452070 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7fsjs\" (UniqueName: \"kubernetes.io/projected/72ee95e1-e7ba-4911-bdd5-69f1303f3309-kube-api-access-7fsjs\") pod \"controller-manager-6f9464b969-hvndh\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:36 crc kubenswrapper[4690]: I0320 13:26:36.634968 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:37 crc kubenswrapper[4690]: E0320 13:26:37.457870 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Mar 20 13:26:37 crc kubenswrapper[4690]: E0320 13:26:37.458417 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-46fr2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed 
in pod community-operators-vwvxj_openshift-marketplace(b8184a4a-79e5-491e-8e56-ebf0bea4601f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Mar 20 13:26:37 crc kubenswrapper[4690]: E0320 13:26:37.459542 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-vwvxj" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" Mar 20 13:26:37 crc kubenswrapper[4690]: E0320 13:26:37.488119 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-vwvxj" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" Mar 20 13:26:40 crc kubenswrapper[4690]: I0320 13:26:40.600554 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-rnpgp" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.040605 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.041405 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.044060 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.044560 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.044982 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.194598 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60b070f7-2169-44be-8c51-f0348b4db89b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"60b070f7-2169-44be-8c51-f0348b4db89b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.194958 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60b070f7-2169-44be-8c51-f0348b4db89b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"60b070f7-2169-44be-8c51-f0348b4db89b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.295817 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60b070f7-2169-44be-8c51-f0348b4db89b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"60b070f7-2169-44be-8c51-f0348b4db89b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.295896 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/60b070f7-2169-44be-8c51-f0348b4db89b-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"60b070f7-2169-44be-8c51-f0348b4db89b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.295930 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60b070f7-2169-44be-8c51-f0348b4db89b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"60b070f7-2169-44be-8c51-f0348b4db89b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.314266 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60b070f7-2169-44be-8c51-f0348b4db89b-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"60b070f7-2169-44be-8c51-f0348b4db89b\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:41 crc kubenswrapper[4690]: I0320 13:26:41.359753 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:42 crc kubenswrapper[4690]: I0320 13:26:42.326598 4690 scope.go:117] "RemoveContainer" containerID="f803376e8c51a696cb6980b985aef7dac2d0d513700612f5b5b45d95845f8f2c" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.340079 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.340268 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-w78vg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-snbxt_openshift-marketplace(c91a8b76-7263-4b29-ac22-b1459fe1f35b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
logger="UnhandledError" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.341688 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-snbxt" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.366702 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.366886 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-flznw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-k9f68_openshift-marketplace(b33d4d78-4e84-48e1-9b17-8427e0bd042e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.367324 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.367486 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-hppjq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-2485h_openshift-marketplace(62e83612-6289-48a8-a3bb-4488048279f7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.368649 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-2485h" podUID="62e83612-6289-48a8-a3bb-4488048279f7" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.368679 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-k9f68" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.503522 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-snbxt" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.504006 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-2485h" podUID="62e83612-6289-48a8-a3bb-4488048279f7" Mar 20 13:26:42 crc kubenswrapper[4690]: E0320 13:26:42.504406 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-k9f68" 
podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" Mar 20 13:26:42 crc kubenswrapper[4690]: I0320 13:26:42.816624 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Mar 20 13:26:42 crc kubenswrapper[4690]: W0320 13:26:42.822569 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod60b070f7_2169_44be_8c51_f0348b4db89b.slice/crio-10f7a845be4b0912b49ee8de7720df62c13c92bdf0430c9416f8f10a42f56f42 WatchSource:0}: Error finding container 10f7a845be4b0912b49ee8de7720df62c13c92bdf0430c9416f8f10a42f56f42: Status 404 returned error can't find the container with id 10f7a845be4b0912b49ee8de7720df62c13c92bdf0430c9416f8f10a42f56f42 Mar 20 13:26:42 crc kubenswrapper[4690]: I0320 13:26:42.826315 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6f9464b969-hvndh"] Mar 20 13:26:42 crc kubenswrapper[4690]: W0320 13:26:42.835934 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72ee95e1_e7ba_4911_bdd5_69f1303f3309.slice/crio-91636ce20f6d44a8c91fa0f03c30bf3825eae6358da83d06a1828a3df2d278f7 WatchSource:0}: Error finding container 91636ce20f6d44a8c91fa0f03c30bf3825eae6358da83d06a1828a3df2d278f7: Status 404 returned error can't find the container with id 91636ce20f6d44a8c91fa0f03c30bf3825eae6358da83d06a1828a3df2d278f7 Mar 20 13:26:42 crc kubenswrapper[4690]: I0320 13:26:42.909934 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj"] Mar 20 13:26:42 crc kubenswrapper[4690]: W0320 13:26:42.964372 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod03f93637_a69f_43d3_a5e8_a206d6bb0fd1.slice/crio-2d6ddc4ff58c4cafa841ad16180ec71d9be706b13aebb9629c123fb7afa67081 WatchSource:0}: Error finding container 2d6ddc4ff58c4cafa841ad16180ec71d9be706b13aebb9629c123fb7afa67081: Status 404 returned error can't find the container with id 2d6ddc4ff58c4cafa841ad16180ec71d9be706b13aebb9629c123fb7afa67081 Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.243447 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6f9464b969-hvndh"] Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.350822 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj"] Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.506710 4690 generic.go:334] "Generic (PLEG): container finished" podID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerID="cba83e2981b16dc4de1aab0015de98325ca77d058d199583f6c5455d1dccb9e3" exitCode=0 Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.507039 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b8fwc" event={"ID":"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb","Type":"ContainerDied","Data":"cba83e2981b16dc4de1aab0015de98325ca77d058d199583f6c5455d1dccb9e3"} Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.509977 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" event={"ID":"72ee95e1-e7ba-4911-bdd5-69f1303f3309","Type":"ContainerStarted","Data":"91636ce20f6d44a8c91fa0f03c30bf3825eae6358da83d06a1828a3df2d278f7"} Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 
13:26:43.512837 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwzf4" event={"ID":"3978f4ce-bf05-41c1-b941-c5927fec1785","Type":"ContainerStarted","Data":"f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115"} Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.514140 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" event={"ID":"03f93637-a69f-43d3-a5e8-a206d6bb0fd1","Type":"ContainerStarted","Data":"2d6ddc4ff58c4cafa841ad16180ec71d9be706b13aebb9629c123fb7afa67081"} Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.515362 4690 generic.go:334] "Generic (PLEG): container finished" podID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerID="fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89" exitCode=0 Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.515414 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rnwq" event={"ID":"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6","Type":"ContainerDied","Data":"fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89"} Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.523331 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"60b070f7-2169-44be-8c51-f0348b4db89b","Type":"ContainerStarted","Data":"10f7a845be4b0912b49ee8de7720df62c13c92bdf0430c9416f8f10a42f56f42"} Mar 20 13:26:43 crc kubenswrapper[4690]: I0320 13:26:43.526152 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmjkl" event={"ID":"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a","Type":"ContainerStarted","Data":"d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf"} Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.532344 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" event={"ID":"72ee95e1-e7ba-4911-bdd5-69f1303f3309","Type":"ContainerStarted","Data":"71e849c9ab0f6e88c7a330fcf16d20e9e8fe2e589da79a9d5f69a4d459170b79"} Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.532680 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.532468 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" podUID="72ee95e1-e7ba-4911-bdd5-69f1303f3309" containerName="controller-manager" containerID="cri-o://71e849c9ab0f6e88c7a330fcf16d20e9e8fe2e589da79a9d5f69a4d459170b79" gracePeriod=30 Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.536780 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.541148 4690 generic.go:334] "Generic (PLEG): container finished" podID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerID="f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115" exitCode=0 Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.541250 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwzf4" 
event={"ID":"3978f4ce-bf05-41c1-b941-c5927fec1785","Type":"ContainerDied","Data":"f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115"} Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.545182 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" event={"ID":"03f93637-a69f-43d3-a5e8-a206d6bb0fd1","Type":"ContainerStarted","Data":"006b34acf9f2dc7549971f29436d3d9143572751a04e2bad76be28f44aa9824e"} Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.545319 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" podUID="03f93637-a69f-43d3-a5e8-a206d6bb0fd1" containerName="route-controller-manager" containerID="cri-o://006b34acf9f2dc7549971f29436d3d9143572751a04e2bad76be28f44aa9824e" gracePeriod=30 Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.545603 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.554514 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" podStartSLOduration=21.554494105 podStartE2EDuration="21.554494105s" podCreationTimestamp="2026-03-20 13:26:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:44.552585092 +0000 UTC m=+250.842185035" watchObservedRunningTime="2026-03-20 13:26:44.554494105 +0000 UTC m=+250.844094048" Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.558198 4690 generic.go:334] "Generic (PLEG): container finished" podID="60b070f7-2169-44be-8c51-f0348b4db89b" containerID="dda5044ce8cf49f6e76460f93eab05596d91a1d3d86a54ee86c03d01494b796f" exitCode=0 Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.558314 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"60b070f7-2169-44be-8c51-f0348b4db89b","Type":"ContainerDied","Data":"dda5044ce8cf49f6e76460f93eab05596d91a1d3d86a54ee86c03d01494b796f"} Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.561644 4690 patch_prober.go:28] interesting pod/route-controller-manager-6cc8f5bbd6-k8whj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.57:8443/healthz\": EOF" start-of-body= Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.561696 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" podUID="03f93637-a69f-43d3-a5e8-a206d6bb0fd1" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.57:8443/healthz\": EOF" Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.563497 4690 generic.go:334] "Generic (PLEG): container finished" podID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerID="d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf" exitCode=0 Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.563531 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmjkl" 
event={"ID":"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a","Type":"ContainerDied","Data":"d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf"} Mar 20 13:26:44 crc kubenswrapper[4690]: I0320 13:26:44.615367 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" podStartSLOduration=21.615350385 podStartE2EDuration="21.615350385s" podCreationTimestamp="2026-03-20 13:26:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:44.593139538 +0000 UTC m=+250.882739491" watchObservedRunningTime="2026-03-20 13:26:44.615350385 +0000 UTC m=+250.904950338" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.570098 4690 generic.go:334] "Generic (PLEG): container finished" podID="03f93637-a69f-43d3-a5e8-a206d6bb0fd1" containerID="006b34acf9f2dc7549971f29436d3d9143572751a04e2bad76be28f44aa9824e" exitCode=0 Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.570166 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" event={"ID":"03f93637-a69f-43d3-a5e8-a206d6bb0fd1","Type":"ContainerDied","Data":"006b34acf9f2dc7549971f29436d3d9143572751a04e2bad76be28f44aa9824e"} Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.573059 4690 generic.go:334] "Generic (PLEG): container finished" podID="72ee95e1-e7ba-4911-bdd5-69f1303f3309" containerID="71e849c9ab0f6e88c7a330fcf16d20e9e8fe2e589da79a9d5f69a4d459170b79" exitCode=0 Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.573208 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" event={"ID":"72ee95e1-e7ba-4911-bdd5-69f1303f3309","Type":"ContainerDied","Data":"71e849c9ab0f6e88c7a330fcf16d20e9e8fe2e589da79a9d5f69a4d459170b79"} Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.878427 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.882278 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.948476 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959590 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60b070f7-2169-44be-8c51-f0348b4db89b-kube-api-access\") pod \"60b070f7-2169-44be-8c51-f0348b4db89b\" (UID: \"60b070f7-2169-44be-8c51-f0348b4db89b\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959645 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-config\") pod \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959674 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-client-ca\") pod \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959690 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjdp8\" (UniqueName: \"kubernetes.io/projected/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-kube-api-access-kjdp8\") pod \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959711 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7fsjs\" (UniqueName: \"kubernetes.io/projected/72ee95e1-e7ba-4911-bdd5-69f1303f3309-kube-api-access-7fsjs\") pod \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959738 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-proxy-ca-bundles\") pod \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959757 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-client-ca\") pod \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959774 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72ee95e1-e7ba-4911-bdd5-69f1303f3309-serving-cert\") pod \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\" (UID: \"72ee95e1-e7ba-4911-bdd5-69f1303f3309\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959798 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-config\") pod \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959817 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60b070f7-2169-44be-8c51-f0348b4db89b-kubelet-dir\") pod \"60b070f7-2169-44be-8c51-f0348b4db89b\" 
(UID: \"60b070f7-2169-44be-8c51-f0348b4db89b\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.959834 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-serving-cert\") pod \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\" (UID: \"03f93637-a69f-43d3-a5e8-a206d6bb0fd1\") " Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.960700 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-client-ca" (OuterVolumeSpecName: "client-ca") pod "03f93637-a69f-43d3-a5e8-a206d6bb0fd1" (UID: "03f93637-a69f-43d3-a5e8-a206d6bb0fd1"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.960758 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/60b070f7-2169-44be-8c51-f0348b4db89b-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "60b070f7-2169-44be-8c51-f0348b4db89b" (UID: "60b070f7-2169-44be-8c51-f0348b4db89b"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.960831 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-config" (OuterVolumeSpecName: "config") pod "72ee95e1-e7ba-4911-bdd5-69f1303f3309" (UID: "72ee95e1-e7ba-4911-bdd5-69f1303f3309"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.960881 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-client-ca" (OuterVolumeSpecName: "client-ca") pod "72ee95e1-e7ba-4911-bdd5-69f1303f3309" (UID: "72ee95e1-e7ba-4911-bdd5-69f1303f3309"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.961171 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "72ee95e1-e7ba-4911-bdd5-69f1303f3309" (UID: "72ee95e1-e7ba-4911-bdd5-69f1303f3309"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.961190 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-config" (OuterVolumeSpecName: "config") pod "03f93637-a69f-43d3-a5e8-a206d6bb0fd1" (UID: "03f93637-a69f-43d3-a5e8-a206d6bb0fd1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.968656 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60b070f7-2169-44be-8c51-f0348b4db89b-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "60b070f7-2169-44be-8c51-f0348b4db89b" (UID: "60b070f7-2169-44be-8c51-f0348b4db89b"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.968670 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72ee95e1-e7ba-4911-bdd5-69f1303f3309-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "72ee95e1-e7ba-4911-bdd5-69f1303f3309" (UID: "72ee95e1-e7ba-4911-bdd5-69f1303f3309"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.968701 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-kube-api-access-kjdp8" (OuterVolumeSpecName: "kube-api-access-kjdp8") pod "03f93637-a69f-43d3-a5e8-a206d6bb0fd1" (UID: "03f93637-a69f-43d3-a5e8-a206d6bb0fd1"). InnerVolumeSpecName "kube-api-access-kjdp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.971055 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72ee95e1-e7ba-4911-bdd5-69f1303f3309-kube-api-access-7fsjs" (OuterVolumeSpecName: "kube-api-access-7fsjs") pod "72ee95e1-e7ba-4911-bdd5-69f1303f3309" (UID: "72ee95e1-e7ba-4911-bdd5-69f1303f3309"). InnerVolumeSpecName "kube-api-access-7fsjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:45 crc kubenswrapper[4690]: I0320 13:26:45.973073 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "03f93637-a69f-43d3-a5e8-a206d6bb0fd1" (UID: "03f93637-a69f-43d3-a5e8-a206d6bb0fd1"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.054618 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Mar 20 13:26:46 crc kubenswrapper[4690]: E0320 13:26:46.054978 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="03f93637-a69f-43d3-a5e8-a206d6bb0fd1" containerName="route-controller-manager" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.055008 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="03f93637-a69f-43d3-a5e8-a206d6bb0fd1" containerName="route-controller-manager" Mar 20 13:26:46 crc kubenswrapper[4690]: E0320 13:26:46.055041 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60b070f7-2169-44be-8c51-f0348b4db89b" containerName="pruner" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.055050 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="60b070f7-2169-44be-8c51-f0348b4db89b" containerName="pruner" Mar 20 13:26:46 crc kubenswrapper[4690]: E0320 13:26:46.055066 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72ee95e1-e7ba-4911-bdd5-69f1303f3309" containerName="controller-manager" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.055074 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="72ee95e1-e7ba-4911-bdd5-69f1303f3309" containerName="controller-manager" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.055195 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="72ee95e1-e7ba-4911-bdd5-69f1303f3309" containerName="controller-manager" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.055210 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="03f93637-a69f-43d3-a5e8-a206d6bb0fd1" containerName="route-controller-manager" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.055225 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="60b070f7-2169-44be-8c51-f0348b4db89b" containerName="pruner" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.055701 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.057270 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060763 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060799 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/72ee95e1-e7ba-4911-bdd5-69f1303f3309-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060813 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060825 4690 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/60b070f7-2169-44be-8c51-f0348b4db89b-kubelet-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060835 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060889 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/60b070f7-2169-44be-8c51-f0348b4db89b-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060902 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060913 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjdp8\" (UniqueName: \"kubernetes.io/projected/03f93637-a69f-43d3-a5e8-a206d6bb0fd1-kube-api-access-kjdp8\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060923 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060934 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7fsjs\" (UniqueName: \"kubernetes.io/projected/72ee95e1-e7ba-4911-bdd5-69f1303f3309-kube-api-access-7fsjs\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.060945 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/72ee95e1-e7ba-4911-bdd5-69f1303f3309-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.162394 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e356e5c-eaed-4153-bf94-c373d10612ac-kube-api-access\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 
13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.162706 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.162800 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-var-lock\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.263485 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e356e5c-eaed-4153-bf94-c373d10612ac-kube-api-access\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.263539 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.263586 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-var-lock\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.263652 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-var-lock\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.263725 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-kubelet-dir\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.283872 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e356e5c-eaed-4153-bf94-c373d10612ac-kube-api-access\") pod \"installer-9-crc\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.396253 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.578614 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.578636 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj" event={"ID":"03f93637-a69f-43d3-a5e8-a206d6bb0fd1","Type":"ContainerDied","Data":"2d6ddc4ff58c4cafa841ad16180ec71d9be706b13aebb9629c123fb7afa67081"} Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.578731 4690 scope.go:117] "RemoveContainer" containerID="006b34acf9f2dc7549971f29436d3d9143572751a04e2bad76be28f44aa9824e" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.580453 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"60b070f7-2169-44be-8c51-f0348b4db89b","Type":"ContainerDied","Data":"10f7a845be4b0912b49ee8de7720df62c13c92bdf0430c9416f8f10a42f56f42"} Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.580478 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="10f7a845be4b0912b49ee8de7720df62c13c92bdf0430c9416f8f10a42f56f42" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.580511 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.584259 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b8fwc" event={"ID":"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb","Type":"ContainerStarted","Data":"611d8fde86a1b51421a2f105bde027c0aa96c4b952cbe7f54589bcd0337d6980"} Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.586657 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" event={"ID":"72ee95e1-e7ba-4911-bdd5-69f1303f3309","Type":"ContainerDied","Data":"91636ce20f6d44a8c91fa0f03c30bf3825eae6358da83d06a1828a3df2d278f7"} Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.586787 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-6f9464b969-hvndh" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.603390 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-b8fwc" podStartSLOduration=3.697772395 podStartE2EDuration="40.603369653s" podCreationTimestamp="2026-03-20 13:26:06 +0000 UTC" firstStartedPulling="2026-03-20 13:26:09.050159901 +0000 UTC m=+215.339759844" lastFinishedPulling="2026-03-20 13:26:45.955757159 +0000 UTC m=+252.245357102" observedRunningTime="2026-03-20 13:26:46.602558921 +0000 UTC m=+252.892158874" watchObservedRunningTime="2026-03-20 13:26:46.603369653 +0000 UTC m=+252.892969596" Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.617049 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-6f9464b969-hvndh"] Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.619806 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-6f9464b969-hvndh"] Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.627399 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj"] Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.631232 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6cc8f5bbd6-k8whj"] Mar 20 13:26:46 crc kubenswrapper[4690]: I0320 13:26:46.668794 4690 scope.go:117] "RemoveContainer" containerID="71e849c9ab0f6e88c7a330fcf16d20e9e8fe2e589da79a9d5f69a4d459170b79" Mar 20 13:26:47 crc kubenswrapper[4690]: I0320 13:26:47.200620 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:47 crc kubenswrapper[4690]: I0320 13:26:47.200688 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:47 crc kubenswrapper[4690]: I0320 13:26:47.665488 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.331906 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg"] Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.332704 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.334356 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.336285 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.336481 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.336673 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.338632 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b"] Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.339467 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.339654 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.339915 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.342062 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.342103 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.342218 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b"] Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.342065 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.342087 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.342712 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.342721 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.346066 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.347019 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg"] Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.420142 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="03f93637-a69f-43d3-a5e8-a206d6bb0fd1" 
path="/var/lib/kubelet/pods/03f93637-a69f-43d3-a5e8-a206d6bb0fd1/volumes" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.420989 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72ee95e1-e7ba-4911-bdd5-69f1303f3309" path="/var/lib/kubelet/pods/72ee95e1-e7ba-4911-bdd5-69f1303f3309/volumes" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.490133 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqhh7\" (UniqueName: \"kubernetes.io/projected/ee954bea-ef68-4e28-8f3a-f75da816ac69-kube-api-access-wqhh7\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.490508 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-client-ca\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.490653 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-config\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.490762 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5b355e8-413f-42e8-bccd-e20a70b6ea74-serving-cert\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.490940 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-config\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.491045 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-client-ca\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.491141 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-proxy-ca-bundles\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.491274 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89jfr\" (UniqueName: \"kubernetes.io/projected/e5b355e8-413f-42e8-bccd-e20a70b6ea74-kube-api-access-89jfr\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.491380 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee954bea-ef68-4e28-8f3a-f75da816ac69-serving-cert\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593448 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-config\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593496 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-client-ca\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593514 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-proxy-ca-bundles\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593558 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89jfr\" (UniqueName: \"kubernetes.io/projected/e5b355e8-413f-42e8-bccd-e20a70b6ea74-kube-api-access-89jfr\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593583 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee954bea-ef68-4e28-8f3a-f75da816ac69-serving-cert\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593616 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqhh7\" (UniqueName: \"kubernetes.io/projected/ee954bea-ef68-4e28-8f3a-f75da816ac69-kube-api-access-wqhh7\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593637 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-client-ca\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593666 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-config\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.593688 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5b355e8-413f-42e8-bccd-e20a70b6ea74-serving-cert\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.595664 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-client-ca\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.595794 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-proxy-ca-bundles\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.596310 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-config\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.598817 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-config\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.601485 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-client-ca\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.602162 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5b355e8-413f-42e8-bccd-e20a70b6ea74-serving-cert\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 
13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.602269 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee954bea-ef68-4e28-8f3a-f75da816ac69-serving-cert\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.604081 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rnwq" event={"ID":"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6","Type":"ContainerStarted","Data":"b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6"} Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.605158 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5e356e5c-eaed-4153-bf94-c373d10612ac","Type":"ContainerStarted","Data":"28aeb6ed72eba0e1ad86decca9250cbfecddd78cc1c76e42576a276bf4572835"} Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.613420 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89jfr\" (UniqueName: \"kubernetes.io/projected/e5b355e8-413f-42e8-bccd-e20a70b6ea74-kube-api-access-89jfr\") pod \"controller-manager-5ddfcff99b-x7ntg\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.620301 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqhh7\" (UniqueName: \"kubernetes.io/projected/ee954bea-ef68-4e28-8f3a-f75da816ac69-kube-api-access-wqhh7\") pod \"route-controller-manager-c9b849b5-2x98b\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.623292 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4rnwq" podStartSLOduration=3.26401675 podStartE2EDuration="42.623271917s" podCreationTimestamp="2026-03-20 13:26:06 +0000 UTC" firstStartedPulling="2026-03-20 13:26:07.900884746 +0000 UTC m=+214.190484689" lastFinishedPulling="2026-03-20 13:26:47.260139913 +0000 UTC m=+253.549739856" observedRunningTime="2026-03-20 13:26:48.618915416 +0000 UTC m=+254.908515359" watchObservedRunningTime="2026-03-20 13:26:48.623271917 +0000 UTC m=+254.912871860" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.628340 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-b8fwc" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="registry-server" probeResult="failure" output=< Mar 20 13:26:48 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Mar 20 13:26:48 crc kubenswrapper[4690]: > Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.667400 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:48 crc kubenswrapper[4690]: I0320 13:26:48.674433 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:49 crc kubenswrapper[4690]: I0320 13:26:49.243967 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg"] Mar 20 13:26:49 crc kubenswrapper[4690]: W0320 13:26:49.252908 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode5b355e8_413f_42e8_bccd_e20a70b6ea74.slice/crio-0d6b6c8b1b7d63500cc69ff8fb2201b051157a1c923a71546c3c0740a7ad6bd5 WatchSource:0}: Error finding container 0d6b6c8b1b7d63500cc69ff8fb2201b051157a1c923a71546c3c0740a7ad6bd5: Status 404 returned error can't find the container with id 0d6b6c8b1b7d63500cc69ff8fb2201b051157a1c923a71546c3c0740a7ad6bd5 Mar 20 13:26:49 crc kubenswrapper[4690]: I0320 13:26:49.297785 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b"] Mar 20 13:26:49 crc kubenswrapper[4690]: W0320 13:26:49.302830 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podee954bea_ef68_4e28_8f3a_f75da816ac69.slice/crio-163e14f5e519929c6e8afee8c2931583d5b2b0ef16cfd05cea5ca1da2cfb608e WatchSource:0}: Error finding container 163e14f5e519929c6e8afee8c2931583d5b2b0ef16cfd05cea5ca1da2cfb608e: Status 404 returned error can't find the container with id 163e14f5e519929c6e8afee8c2931583d5b2b0ef16cfd05cea5ca1da2cfb608e Mar 20 13:26:49 crc kubenswrapper[4690]: I0320 13:26:49.612553 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwzf4" event={"ID":"3978f4ce-bf05-41c1-b941-c5927fec1785","Type":"ContainerStarted","Data":"82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a"} Mar 20 13:26:49 crc kubenswrapper[4690]: I0320 13:26:49.613436 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" event={"ID":"ee954bea-ef68-4e28-8f3a-f75da816ac69","Type":"ContainerStarted","Data":"163e14f5e519929c6e8afee8c2931583d5b2b0ef16cfd05cea5ca1da2cfb608e"} Mar 20 13:26:49 crc kubenswrapper[4690]: I0320 13:26:49.614447 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" event={"ID":"e5b355e8-413f-42e8-bccd-e20a70b6ea74","Type":"ContainerStarted","Data":"0d6b6c8b1b7d63500cc69ff8fb2201b051157a1c923a71546c3c0740a7ad6bd5"} Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.622430 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" event={"ID":"6db1d803-f871-41d2-b6a7-0b3456af1ddf","Type":"ContainerStarted","Data":"7eec703138e85bdbc467294840668e4439643069915bcf24fee54923fde3f973"} Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.625294 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmjkl" event={"ID":"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a","Type":"ContainerStarted","Data":"33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311"} Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.626961 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5e356e5c-eaed-4153-bf94-c373d10612ac","Type":"ContainerStarted","Data":"945df3ab20791ea32df0b9874d35b29ceb81b448bcb1cfac8119164b6ac3b3d2"} Mar 
20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.628492 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" event={"ID":"ee954bea-ef68-4e28-8f3a-f75da816ac69","Type":"ContainerStarted","Data":"0c4d279ed6387cf0232f93cfb929815036dc10932d797f6ca88ea4fcaf62ac4d"} Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.628705 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.634861 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" event={"ID":"e5b355e8-413f-42e8-bccd-e20a70b6ea74","Type":"ContainerStarted","Data":"30c3d7dac688ebaca9d53f0c7cd7a5fcb330fd1bd3c92a24a361d5302da69a87"} Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.635049 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.638211 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" podStartSLOduration=3.223376368 podStartE2EDuration="50.638192333s" podCreationTimestamp="2026-03-20 13:26:00 +0000 UTC" firstStartedPulling="2026-03-20 13:26:02.862183407 +0000 UTC m=+209.151783350" lastFinishedPulling="2026-03-20 13:26:50.276999372 +0000 UTC m=+256.566599315" observedRunningTime="2026-03-20 13:26:50.636342981 +0000 UTC m=+256.925942934" watchObservedRunningTime="2026-03-20 13:26:50.638192333 +0000 UTC m=+256.927792286" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.641983 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.655811 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" podStartSLOduration=7.655790591 podStartE2EDuration="7.655790591s" podCreationTimestamp="2026-03-20 13:26:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:50.654591568 +0000 UTC m=+256.944191511" watchObservedRunningTime="2026-03-20 13:26:50.655790591 +0000 UTC m=+256.945390534" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.669223 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=4.669204424 podStartE2EDuration="4.669204424s" podCreationTimestamp="2026-03-20 13:26:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:50.668374901 +0000 UTC m=+256.957974844" watchObservedRunningTime="2026-03-20 13:26:50.669204424 +0000 UTC m=+256.958804367" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.706289 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zwzf4" podStartSLOduration=3.91753994 podStartE2EDuration="41.706268963s" podCreationTimestamp="2026-03-20 13:26:09 +0000 UTC" firstStartedPulling="2026-03-20 13:26:11.161548245 +0000 UTC m=+217.451148188" lastFinishedPulling="2026-03-20 13:26:48.950277268 +0000 
UTC m=+255.239877211" observedRunningTime="2026-03-20 13:26:50.701784969 +0000 UTC m=+256.991384922" watchObservedRunningTime="2026-03-20 13:26:50.706268963 +0000 UTC m=+256.995868906" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.736715 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" podStartSLOduration=7.7366943379999995 podStartE2EDuration="7.736694338s" podCreationTimestamp="2026-03-20 13:26:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:26:50.734083506 +0000 UTC m=+257.023683449" watchObservedRunningTime="2026-03-20 13:26:50.736694338 +0000 UTC m=+257.026294281" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.760660 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gmjkl" podStartSLOduration=3.784651728 podStartE2EDuration="41.760643483s" podCreationTimestamp="2026-03-20 13:26:09 +0000 UTC" firstStartedPulling="2026-03-20 13:26:12.212068387 +0000 UTC m=+218.501668330" lastFinishedPulling="2026-03-20 13:26:50.188060142 +0000 UTC m=+256.477660085" observedRunningTime="2026-03-20 13:26:50.758029631 +0000 UTC m=+257.047629574" watchObservedRunningTime="2026-03-20 13:26:50.760643483 +0000 UTC m=+257.050243426" Mar 20 13:26:50 crc kubenswrapper[4690]: I0320 13:26:50.840795 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:26:51 crc kubenswrapper[4690]: I0320 13:26:51.364887 4690 csr.go:261] certificate signing request csr-k4knk is approved, waiting to be issued Mar 20 13:26:51 crc kubenswrapper[4690]: I0320 13:26:51.373733 4690 csr.go:257] certificate signing request csr-k4knk is issued Mar 20 13:26:51 crc kubenswrapper[4690]: I0320 13:26:51.641053 4690 generic.go:334] "Generic (PLEG): container finished" podID="6db1d803-f871-41d2-b6a7-0b3456af1ddf" containerID="7eec703138e85bdbc467294840668e4439643069915bcf24fee54923fde3f973" exitCode=0 Mar 20 13:26:51 crc kubenswrapper[4690]: I0320 13:26:51.641411 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" event={"ID":"6db1d803-f871-41d2-b6a7-0b3456af1ddf","Type":"ContainerDied","Data":"7eec703138e85bdbc467294840668e4439643069915bcf24fee54923fde3f973"} Mar 20 13:26:51 crc kubenswrapper[4690]: I0320 13:26:51.651064 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Mar 20 13:26:52 crc kubenswrapper[4690]: I0320 13:26:52.375119 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2026-12-11 20:15:39.676134843 +0000 UTC Mar 20 13:26:52 crc kubenswrapper[4690]: I0320 13:26:52.375164 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6390h48m47.300975184s for next certificate rotation Mar 20 13:26:52 crc kubenswrapper[4690]: I0320 13:26:52.649900 4690 generic.go:334] "Generic (PLEG): container finished" podID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerID="36b79201273353902f7fe5c64f3c468e3973a41e3b96702d299f1beffda05b1c" exitCode=0 Mar 20 13:26:52 crc kubenswrapper[4690]: I0320 13:26:52.650008 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-vwvxj" event={"ID":"b8184a4a-79e5-491e-8e56-ebf0bea4601f","Type":"ContainerDied","Data":"36b79201273353902f7fe5c64f3c468e3973a41e3b96702d299f1beffda05b1c"} Mar 20 13:26:52 crc kubenswrapper[4690]: I0320 13:26:52.955436 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" Mar 20 13:26:52 crc kubenswrapper[4690]: I0320 13:26:52.965675 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7bth\" (UniqueName: \"kubernetes.io/projected/6db1d803-f871-41d2-b6a7-0b3456af1ddf-kube-api-access-t7bth\") pod \"6db1d803-f871-41d2-b6a7-0b3456af1ddf\" (UID: \"6db1d803-f871-41d2-b6a7-0b3456af1ddf\") " Mar 20 13:26:52 crc kubenswrapper[4690]: I0320 13:26:52.972761 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6db1d803-f871-41d2-b6a7-0b3456af1ddf-kube-api-access-t7bth" (OuterVolumeSpecName: "kube-api-access-t7bth") pod "6db1d803-f871-41d2-b6a7-0b3456af1ddf" (UID: "6db1d803-f871-41d2-b6a7-0b3456af1ddf"). InnerVolumeSpecName "kube-api-access-t7bth". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:26:53 crc kubenswrapper[4690]: I0320 13:26:53.067035 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7bth\" (UniqueName: \"kubernetes.io/projected/6db1d803-f871-41d2-b6a7-0b3456af1ddf-kube-api-access-t7bth\") on node \"crc\" DevicePath \"\"" Mar 20 13:26:53 crc kubenswrapper[4690]: I0320 13:26:53.375561 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-24 05:54:36 +0000 UTC, rotation deadline is 2027-01-08 09:53:35.051754396 +0000 UTC Mar 20 13:26:53 crc kubenswrapper[4690]: I0320 13:26:53.375604 4690 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 7052h26m41.676153174s for next certificate rotation Mar 20 13:26:53 crc kubenswrapper[4690]: I0320 13:26:53.656606 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" event={"ID":"6db1d803-f871-41d2-b6a7-0b3456af1ddf","Type":"ContainerDied","Data":"e58ac6245d8ffd8446b904497c6144be8ec2d0e0eac0e9a0349c0718cc2a2875"} Mar 20 13:26:53 crc kubenswrapper[4690]: I0320 13:26:53.657629 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e58ac6245d8ffd8446b904497c6144be8ec2d0e0eac0e9a0349c0718cc2a2875" Mar 20 13:26:53 crc kubenswrapper[4690]: I0320 13:26:53.656661 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566886-cp8l8" Mar 20 13:26:56 crc kubenswrapper[4690]: I0320 13:26:56.592299 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:56 crc kubenswrapper[4690]: I0320 13:26:56.592684 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:56 crc kubenswrapper[4690]: I0320 13:26:56.662528 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:56 crc kubenswrapper[4690]: I0320 13:26:56.714742 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:26:57 crc kubenswrapper[4690]: I0320 13:26:57.246085 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:57 crc kubenswrapper[4690]: I0320 13:26:57.299154 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:26:57 crc kubenswrapper[4690]: I0320 13:26:57.688433 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvxj" event={"ID":"b8184a4a-79e5-491e-8e56-ebf0bea4601f","Type":"ContainerStarted","Data":"c1d6929c49089d5397c8db5e1718d3637c7633a0d298e189ce1f047eea314dea"} Mar 20 13:26:58 crc kubenswrapper[4690]: I0320 13:26:58.725031 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vwvxj" podStartSLOduration=6.413623434 podStartE2EDuration="52.725008821s" podCreationTimestamp="2026-03-20 13:26:06 +0000 UTC" firstStartedPulling="2026-03-20 13:26:09.031516167 +0000 UTC m=+215.321116110" lastFinishedPulling="2026-03-20 13:26:55.342901554 +0000 UTC m=+261.632501497" observedRunningTime="2026-03-20 13:26:58.724442652 +0000 UTC m=+265.014042615" watchObservedRunningTime="2026-03-20 13:26:58.725008821 +0000 UTC m=+265.014608774" Mar 20 13:26:58 crc kubenswrapper[4690]: I0320 13:26:58.749088 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b8fwc"] Mar 20 13:26:58 crc kubenswrapper[4690]: I0320 13:26:58.749421 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-b8fwc" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="registry-server" containerID="cri-o://611d8fde86a1b51421a2f105bde027c0aa96c4b952cbe7f54589bcd0337d6980" gracePeriod=2 Mar 20 13:26:59 crc kubenswrapper[4690]: I0320 13:26:59.747478 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:59 crc kubenswrapper[4690]: I0320 13:26:59.748023 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:26:59 crc kubenswrapper[4690]: I0320 13:26:59.804262 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:27:00 crc kubenswrapper[4690]: I0320 13:27:00.214404 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:27:00 crc kubenswrapper[4690]: I0320 13:27:00.214659 4690 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:27:00 crc kubenswrapper[4690]: I0320 13:27:00.254342 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:27:00 crc kubenswrapper[4690]: I0320 13:27:00.704172 4690 generic.go:334] "Generic (PLEG): container finished" podID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerID="611d8fde86a1b51421a2f105bde027c0aa96c4b952cbe7f54589bcd0337d6980" exitCode=0 Mar 20 13:27:00 crc kubenswrapper[4690]: I0320 13:27:00.704280 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b8fwc" event={"ID":"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb","Type":"ContainerDied","Data":"611d8fde86a1b51421a2f105bde027c0aa96c4b952cbe7f54589bcd0337d6980"} Mar 20 13:27:00 crc kubenswrapper[4690]: I0320 13:27:00.757537 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:27:00 crc kubenswrapper[4690]: I0320 13:27:00.760748 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:27:02 crc kubenswrapper[4690]: I0320 13:27:02.135527 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gmjkl"] Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.248965 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg"] Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.249312 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" podUID="e5b355e8-413f-42e8-bccd-e20a70b6ea74" containerName="controller-manager" containerID="cri-o://30c3d7dac688ebaca9d53f0c7cd7a5fcb330fd1bd3c92a24a361d5302da69a87" gracePeriod=30 Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.266632 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b"] Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.266876 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" podUID="ee954bea-ef68-4e28-8f3a-f75da816ac69" containerName="route-controller-manager" containerID="cri-o://0c4d279ed6387cf0232f93cfb929815036dc10932d797f6ca88ea4fcaf62ac4d" gracePeriod=30 Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.726189 4690 generic.go:334] "Generic (PLEG): container finished" podID="ee954bea-ef68-4e28-8f3a-f75da816ac69" containerID="0c4d279ed6387cf0232f93cfb929815036dc10932d797f6ca88ea4fcaf62ac4d" exitCode=0 Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.726263 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" event={"ID":"ee954bea-ef68-4e28-8f3a-f75da816ac69","Type":"ContainerDied","Data":"0c4d279ed6387cf0232f93cfb929815036dc10932d797f6ca88ea4fcaf62ac4d"} Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.728100 4690 generic.go:334] "Generic (PLEG): container finished" podID="e5b355e8-413f-42e8-bccd-e20a70b6ea74" containerID="30c3d7dac688ebaca9d53f0c7cd7a5fcb330fd1bd3c92a24a361d5302da69a87" exitCode=0 Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.728161 4690 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" event={"ID":"e5b355e8-413f-42e8-bccd-e20a70b6ea74","Type":"ContainerDied","Data":"30c3d7dac688ebaca9d53f0c7cd7a5fcb330fd1bd3c92a24a361d5302da69a87"} Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.728466 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gmjkl" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerName="registry-server" containerID="cri-o://33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311" gracePeriod=2 Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.829212 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:27:03 crc kubenswrapper[4690]: I0320 13:27:03.829275 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.174474 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.261712 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-utilities\") pod \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.261835 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-catalog-content\") pod \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.261922 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2zch7\" (UniqueName: \"kubernetes.io/projected/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-kube-api-access-2zch7\") pod \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\" (UID: \"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.263167 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-utilities" (OuterVolumeSpecName: "utilities") pod "e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" (UID: "e38d71d2-09b1-4ff8-b9df-91da1e2b97cb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.268301 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-kube-api-access-2zch7" (OuterVolumeSpecName: "kube-api-access-2zch7") pod "e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" (UID: "e38d71d2-09b1-4ff8-b9df-91da1e2b97cb"). InnerVolumeSpecName "kube-api-access-2zch7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.328782 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" (UID: "e38d71d2-09b1-4ff8-b9df-91da1e2b97cb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.362988 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.363026 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2zch7\" (UniqueName: \"kubernetes.io/projected/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-kube-api-access-2zch7\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.363059 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.636473 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.718273 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.739533 4690 generic.go:334] "Generic (PLEG): container finished" podID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerID="33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311" exitCode=0 Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.739601 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmjkl" event={"ID":"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a","Type":"ContainerDied","Data":"33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311"} Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.739631 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmjkl" event={"ID":"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a","Type":"ContainerDied","Data":"f82a19ad6323fc0ff28828b3dbfbc17bf6d059311fb5f4d6cce9311c585e694d"} Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.739649 4690 scope.go:117] "RemoveContainer" containerID="33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.739751 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gmjkl" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.742495 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-b8fwc" event={"ID":"e38d71d2-09b1-4ff8-b9df-91da1e2b97cb","Type":"ContainerDied","Data":"53363d59764e451a993d125df6416f58ac6edd1d58a58e665e1805181c841f6d"} Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.742575 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-b8fwc" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.744278 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" event={"ID":"ee954bea-ef68-4e28-8f3a-f75da816ac69","Type":"ContainerDied","Data":"163e14f5e519929c6e8afee8c2931583d5b2b0ef16cfd05cea5ca1da2cfb608e"} Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.744325 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.772625 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-b8fwc"] Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.778112 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-config\") pod \"ee954bea-ef68-4e28-8f3a-f75da816ac69\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.778208 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-client-ca\") pod \"ee954bea-ef68-4e28-8f3a-f75da816ac69\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.778249 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-utilities\") pod \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.778388 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee954bea-ef68-4e28-8f3a-f75da816ac69-serving-cert\") pod \"ee954bea-ef68-4e28-8f3a-f75da816ac69\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.778421 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtpb5\" (UniqueName: \"kubernetes.io/projected/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-kube-api-access-wtpb5\") pod \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.778461 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqhh7\" (UniqueName: \"kubernetes.io/projected/ee954bea-ef68-4e28-8f3a-f75da816ac69-kube-api-access-wqhh7\") pod \"ee954bea-ef68-4e28-8f3a-f75da816ac69\" (UID: \"ee954bea-ef68-4e28-8f3a-f75da816ac69\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.778483 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-catalog-content\") pod \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\" (UID: \"9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a\") " Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.779135 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-b8fwc"] Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.779896 4690 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-utilities" (OuterVolumeSpecName: "utilities") pod "9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" (UID: "9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.780748 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-config" (OuterVolumeSpecName: "config") pod "ee954bea-ef68-4e28-8f3a-f75da816ac69" (UID: "ee954bea-ef68-4e28-8f3a-f75da816ac69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.781182 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-client-ca" (OuterVolumeSpecName: "client-ca") pod "ee954bea-ef68-4e28-8f3a-f75da816ac69" (UID: "ee954bea-ef68-4e28-8f3a-f75da816ac69"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.785651 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-kube-api-access-wtpb5" (OuterVolumeSpecName: "kube-api-access-wtpb5") pod "9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" (UID: "9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a"). InnerVolumeSpecName "kube-api-access-wtpb5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.786590 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ee954bea-ef68-4e28-8f3a-f75da816ac69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ee954bea-ef68-4e28-8f3a-f75da816ac69" (UID: "ee954bea-ef68-4e28-8f3a-f75da816ac69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.787315 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ee954bea-ef68-4e28-8f3a-f75da816ac69-kube-api-access-wqhh7" (OuterVolumeSpecName: "kube-api-access-wqhh7") pod "ee954bea-ef68-4e28-8f3a-f75da816ac69" (UID: "ee954bea-ef68-4e28-8f3a-f75da816ac69"). InnerVolumeSpecName "kube-api-access-wqhh7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.880363 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ee954bea-ef68-4e28-8f3a-f75da816ac69-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.880655 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtpb5\" (UniqueName: \"kubernetes.io/projected/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-kube-api-access-wtpb5\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.880672 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqhh7\" (UniqueName: \"kubernetes.io/projected/ee954bea-ef68-4e28-8f3a-f75da816ac69-kube-api-access-wqhh7\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.880689 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.880702 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ee954bea-ef68-4e28-8f3a-f75da816ac69-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.880714 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.908451 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" (UID: "9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:27:04 crc kubenswrapper[4690]: I0320 13:27:04.982118 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.078656 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gmjkl"] Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.082118 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gmjkl"] Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.089283 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b"] Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.092164 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-c9b849b5-2x98b"] Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.180377 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.241917 4690 scope.go:117] "RemoveContainer" containerID="d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.274776 4690 scope.go:117] "RemoveContainer" containerID="79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.284155 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-config\") pod \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.284241 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-client-ca\") pod \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.284307 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-proxy-ca-bundles\") pod \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.284405 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89jfr\" (UniqueName: \"kubernetes.io/projected/e5b355e8-413f-42e8-bccd-e20a70b6ea74-kube-api-access-89jfr\") pod \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.284482 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5b355e8-413f-42e8-bccd-e20a70b6ea74-serving-cert\") pod \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\" (UID: \"e5b355e8-413f-42e8-bccd-e20a70b6ea74\") " Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.285226 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-client-ca" (OuterVolumeSpecName: "client-ca") pod "e5b355e8-413f-42e8-bccd-e20a70b6ea74" (UID: "e5b355e8-413f-42e8-bccd-e20a70b6ea74"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.285285 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e5b355e8-413f-42e8-bccd-e20a70b6ea74" (UID: "e5b355e8-413f-42e8-bccd-e20a70b6ea74"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.285321 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-config" (OuterVolumeSpecName: "config") pod "e5b355e8-413f-42e8-bccd-e20a70b6ea74" (UID: "e5b355e8-413f-42e8-bccd-e20a70b6ea74"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.288722 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5b355e8-413f-42e8-bccd-e20a70b6ea74-kube-api-access-89jfr" (OuterVolumeSpecName: "kube-api-access-89jfr") pod "e5b355e8-413f-42e8-bccd-e20a70b6ea74" (UID: "e5b355e8-413f-42e8-bccd-e20a70b6ea74"). InnerVolumeSpecName "kube-api-access-89jfr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.289002 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5b355e8-413f-42e8-bccd-e20a70b6ea74-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e5b355e8-413f-42e8-bccd-e20a70b6ea74" (UID: "e5b355e8-413f-42e8-bccd-e20a70b6ea74"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.289992 4690 scope.go:117] "RemoveContainer" containerID="33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311" Mar 20 13:27:05 crc kubenswrapper[4690]: E0320 13:27:05.290572 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311\": container with ID starting with 33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311 not found: ID does not exist" containerID="33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.290651 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311"} err="failed to get container status \"33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311\": rpc error: code = NotFound desc = could not find container \"33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311\": container with ID starting with 33620a74478a1187043eb906e8b34948f79dc5cddb3eea9bfd9824f21ca0a311 not found: ID does not exist" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.290690 4690 scope.go:117] "RemoveContainer" containerID="d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf" Mar 20 13:27:05 crc kubenswrapper[4690]: E0320 13:27:05.292224 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf\": container with ID starting with d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf not found: ID does not exist" containerID="d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.292244 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf"} err="failed to get container status \"d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf\": rpc error: code = NotFound desc = could not find container \"d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf\": container with ID starting with d87a5e8ac9cd9ba87ce8ddf9f9d0207c62f0bb0248646756f674790954a275cf not found: ID does not exist" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.292290 4690 scope.go:117] "RemoveContainer" 
containerID="79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b" Mar 20 13:27:05 crc kubenswrapper[4690]: E0320 13:27:05.292593 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b\": container with ID starting with 79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b not found: ID does not exist" containerID="79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.292617 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b"} err="failed to get container status \"79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b\": rpc error: code = NotFound desc = could not find container \"79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b\": container with ID starting with 79e28a1e19d707e0cd7d8605fad8acaab150ee11b636b789aba2e883fd58149b not found: ID does not exist" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.292630 4690 scope.go:117] "RemoveContainer" containerID="611d8fde86a1b51421a2f105bde027c0aa96c4b952cbe7f54589bcd0337d6980" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.320943 4690 scope.go:117] "RemoveContainer" containerID="cba83e2981b16dc4de1aab0015de98325ca77d058d199583f6c5455d1dccb9e3" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.342975 4690 scope.go:117] "RemoveContainer" containerID="f087f74a8a7f1068b6844b2f1f2ea22618c2efff5adfc068a3d7258ef4848e54" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.361124 4690 scope.go:117] "RemoveContainer" containerID="0c4d279ed6387cf0232f93cfb929815036dc10932d797f6ca88ea4fcaf62ac4d" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.386228 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.386265 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.386279 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e5b355e8-413f-42e8-bccd-e20a70b6ea74-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.386295 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89jfr\" (UniqueName: \"kubernetes.io/projected/e5b355e8-413f-42e8-bccd-e20a70b6ea74-kube-api-access-89jfr\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.386306 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5b355e8-413f-42e8-bccd-e20a70b6ea74-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.750836 4690 generic.go:334] "Generic (PLEG): container finished" podID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerID="0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17" exitCode=0 Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.750883 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-k9f68" event={"ID":"b33d4d78-4e84-48e1-9b17-8427e0bd042e","Type":"ContainerDied","Data":"0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17"} Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.758740 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" event={"ID":"e5b355e8-413f-42e8-bccd-e20a70b6ea74","Type":"ContainerDied","Data":"0d6b6c8b1b7d63500cc69ff8fb2201b051157a1c923a71546c3c0740a7ad6bd5"} Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.758783 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.758816 4690 scope.go:117] "RemoveContainer" containerID="30c3d7dac688ebaca9d53f0c7cd7a5fcb330fd1bd3c92a24a361d5302da69a87" Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.760952 4690 generic.go:334] "Generic (PLEG): container finished" podID="62e83612-6289-48a8-a3bb-4488048279f7" containerID="085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b" exitCode=0 Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.761015 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2485h" event={"ID":"62e83612-6289-48a8-a3bb-4488048279f7","Type":"ContainerDied","Data":"085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b"} Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.762402 4690 generic.go:334] "Generic (PLEG): container finished" podID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerID="0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548" exitCode=0 Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.762443 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-snbxt" event={"ID":"c91a8b76-7263-4b29-ac22-b1459fe1f35b","Type":"ContainerDied","Data":"0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548"} Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.833436 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg"] Mar 20 13:27:05 crc kubenswrapper[4690]: I0320 13:27:05.836103 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5ddfcff99b-x7ntg"] Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340302 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-664fd65b8c-lljln"] Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340620 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ee954bea-ef68-4e28-8f3a-f75da816ac69" containerName="route-controller-manager" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340639 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="ee954bea-ef68-4e28-8f3a-f75da816ac69" containerName="route-controller-manager" Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340656 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="extract-content" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340664 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="extract-content" Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340677 4690 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerName="registry-server" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340684 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerName="registry-server" Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340692 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5b355e8-413f-42e8-bccd-e20a70b6ea74" containerName="controller-manager" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340700 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5b355e8-413f-42e8-bccd-e20a70b6ea74" containerName="controller-manager" Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340709 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="registry-server" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340715 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="registry-server" Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340722 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6db1d803-f871-41d2-b6a7-0b3456af1ddf" containerName="oc" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340728 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="6db1d803-f871-41d2-b6a7-0b3456af1ddf" containerName="oc" Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340734 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="extract-utilities" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340740 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="extract-utilities" Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340751 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerName="extract-utilities" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340756 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerName="extract-utilities" Mar 20 13:27:06 crc kubenswrapper[4690]: E0320 13:27:06.340764 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerName="extract-content" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340770 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerName="extract-content" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340876 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" containerName="registry-server" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340888 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" containerName="registry-server" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340897 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5b355e8-413f-42e8-bccd-e20a70b6ea74" containerName="controller-manager" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340910 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="6db1d803-f871-41d2-b6a7-0b3456af1ddf" containerName="oc" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.340916 4690 
memory_manager.go:354] "RemoveStaleState removing state" podUID="ee954bea-ef68-4e28-8f3a-f75da816ac69" containerName="route-controller-manager" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.341374 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.344162 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.344520 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r"] Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.344652 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.344824 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.344958 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.345343 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.345473 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.345490 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.352130 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.352222 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.352280 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.352321 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.353281 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.353762 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.359102 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.359473 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r"] Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.365934 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-664fd65b8c-lljln"] Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.398784 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-config\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.398874 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-config\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.398904 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-client-ca\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.398954 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b503c44-4908-42e5-931c-c44d6337a0ad-serving-cert\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.398981 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7hkx\" (UniqueName: \"kubernetes.io/projected/6b503c44-4908-42e5-931c-c44d6337a0ad-kube-api-access-h7hkx\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.399007 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-client-ca\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.399046 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5rnt6\" (UniqueName: \"kubernetes.io/projected/5bcf346d-5d59-4b80-97da-fce796c1259b-kube-api-access-5rnt6\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.399109 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bcf346d-5d59-4b80-97da-fce796c1259b-serving-cert\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " 
pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.399148 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-proxy-ca-bundles\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.423645 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a" path="/var/lib/kubelet/pods/9a0ca900-7de1-4e9a-b8d6-accb8bbe7e7a/volumes" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.425043 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e38d71d2-09b1-4ff8-b9df-91da1e2b97cb" path="/var/lib/kubelet/pods/e38d71d2-09b1-4ff8-b9df-91da1e2b97cb/volumes" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.426096 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5b355e8-413f-42e8-bccd-e20a70b6ea74" path="/var/lib/kubelet/pods/e5b355e8-413f-42e8-bccd-e20a70b6ea74/volumes" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.426774 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ee954bea-ef68-4e28-8f3a-f75da816ac69" path="/var/lib/kubelet/pods/ee954bea-ef68-4e28-8f3a-f75da816ac69/volumes" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500566 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b503c44-4908-42e5-931c-c44d6337a0ad-serving-cert\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500611 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7hkx\" (UniqueName: \"kubernetes.io/projected/6b503c44-4908-42e5-931c-c44d6337a0ad-kube-api-access-h7hkx\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500634 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-client-ca\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500666 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5rnt6\" (UniqueName: \"kubernetes.io/projected/5bcf346d-5d59-4b80-97da-fce796c1259b-kube-api-access-5rnt6\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500683 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bcf346d-5d59-4b80-97da-fce796c1259b-serving-cert\") pod 
\"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500705 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-proxy-ca-bundles\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500726 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-config\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500752 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-config\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.500767 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-client-ca\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.501600 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-client-ca\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.502097 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-client-ca\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.502290 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-config\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.502830 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-config\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.503201 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-proxy-ca-bundles\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.505639 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bcf346d-5d59-4b80-97da-fce796c1259b-serving-cert\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.506782 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b503c44-4908-42e5-931c-c44d6337a0ad-serving-cert\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.518269 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5rnt6\" (UniqueName: \"kubernetes.io/projected/5bcf346d-5d59-4b80-97da-fce796c1259b-kube-api-access-5rnt6\") pod \"route-controller-manager-7ffb96f869-p7m8r\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.520733 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7hkx\" (UniqueName: \"kubernetes.io/projected/6b503c44-4908-42e5-931c-c44d6337a0ad-kube-api-access-h7hkx\") pod \"controller-manager-664fd65b8c-lljln\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.662476 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.676146 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:06 crc kubenswrapper[4690]: I0320 13:27:06.771040 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9f68" event={"ID":"b33d4d78-4e84-48e1-9b17-8427e0bd042e","Type":"ContainerStarted","Data":"02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b"} Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.016093 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.016133 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.073271 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.203699 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r"] Mar 20 13:27:07 crc kubenswrapper[4690]: W0320 13:27:07.206516 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5bcf346d_5d59_4b80_97da_fce796c1259b.slice/crio-fd07d8c429addf4e11f590440f4c0fa328ce9c1e639b847529005cdbc83e445c WatchSource:0}: Error finding container fd07d8c429addf4e11f590440f4c0fa328ce9c1e639b847529005cdbc83e445c: Status 404 returned error can't find the container with id fd07d8c429addf4e11f590440f4c0fa328ce9c1e639b847529005cdbc83e445c Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.391865 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-664fd65b8c-lljln"] Mar 20 13:27:07 crc kubenswrapper[4690]: W0320 13:27:07.405218 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6b503c44_4908_42e5_931c_c44d6337a0ad.slice/crio-06c821ce66601f268dcd811d039a3e3fc3c398793bda4e7ec65cf6c3bd24af44 WatchSource:0}: Error finding container 06c821ce66601f268dcd811d039a3e3fc3c398793bda4e7ec65cf6c3bd24af44: Status 404 returned error can't find the container with id 06c821ce66601f268dcd811d039a3e3fc3c398793bda4e7ec65cf6c3bd24af44 Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.780990 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2485h" event={"ID":"62e83612-6289-48a8-a3bb-4488048279f7","Type":"ContainerStarted","Data":"d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040"} Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.783386 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-snbxt" event={"ID":"c91a8b76-7263-4b29-ac22-b1459fe1f35b","Type":"ContainerStarted","Data":"e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc"} Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.784888 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" event={"ID":"5bcf346d-5d59-4b80-97da-fce796c1259b","Type":"ContainerStarted","Data":"9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568"} Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.784914 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" event={"ID":"5bcf346d-5d59-4b80-97da-fce796c1259b","Type":"ContainerStarted","Data":"fd07d8c429addf4e11f590440f4c0fa328ce9c1e639b847529005cdbc83e445c"} Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.785146 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.786295 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" event={"ID":"6b503c44-4908-42e5-931c-c44d6337a0ad","Type":"ContainerStarted","Data":"d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40"} Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.786326 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" event={"ID":"6b503c44-4908-42e5-931c-c44d6337a0ad","Type":"ContainerStarted","Data":"06c821ce66601f268dcd811d039a3e3fc3c398793bda4e7ec65cf6c3bd24af44"} Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.815224 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2485h" podStartSLOduration=3.9031837 podStartE2EDuration="1m1.815200081s" podCreationTimestamp="2026-03-20 13:26:06 +0000 UTC" firstStartedPulling="2026-03-20 13:26:09.007379323 +0000 UTC m=+215.296979266" lastFinishedPulling="2026-03-20 13:27:06.919395694 +0000 UTC m=+273.208995647" observedRunningTime="2026-03-20 13:27:07.808678363 +0000 UTC m=+274.098278316" watchObservedRunningTime="2026-03-20 13:27:07.815200081 +0000 UTC m=+274.104800024" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.833734 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" podStartSLOduration=4.833720509 podStartE2EDuration="4.833720509s" podCreationTimestamp="2026-03-20 13:27:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:27:07.832712165 +0000 UTC m=+274.122312108" watchObservedRunningTime="2026-03-20 13:27:07.833720509 +0000 UTC m=+274.123320452" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.852989 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.861813 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-snbxt" podStartSLOduration=2.849668056 podStartE2EDuration="59.861793026s" podCreationTimestamp="2026-03-20 13:26:08 +0000 UTC" firstStartedPulling="2026-03-20 13:26:10.080755316 +0000 UTC m=+216.370355259" lastFinishedPulling="2026-03-20 13:27:07.092880286 +0000 UTC m=+273.382480229" observedRunningTime="2026-03-20 13:27:07.858890009 +0000 UTC m=+274.148489942" watchObservedRunningTime="2026-03-20 13:27:07.861793026 +0000 UTC m=+274.151392959" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.885955 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-k9f68" podStartSLOduration=3.408780602 podStartE2EDuration="59.885939012s" podCreationTimestamp="2026-03-20 13:26:08 +0000 UTC" 
firstStartedPulling="2026-03-20 13:26:10.097789684 +0000 UTC m=+216.387389627" lastFinishedPulling="2026-03-20 13:27:06.574948094 +0000 UTC m=+272.864548037" observedRunningTime="2026-03-20 13:27:07.883514911 +0000 UTC m=+274.173114864" watchObservedRunningTime="2026-03-20 13:27:07.885939012 +0000 UTC m=+274.175538955" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.902940 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:07 crc kubenswrapper[4690]: I0320 13:27:07.910314 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" podStartSLOduration=4.910281945 podStartE2EDuration="4.910281945s" podCreationTimestamp="2026-03-20 13:27:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:27:07.907020326 +0000 UTC m=+274.196620269" watchObservedRunningTime="2026-03-20 13:27:07.910281945 +0000 UTC m=+274.199881888" Mar 20 13:27:08 crc kubenswrapper[4690]: I0320 13:27:08.564690 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:27:08 crc kubenswrapper[4690]: I0320 13:27:08.564739 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:27:08 crc kubenswrapper[4690]: I0320 13:27:08.792663 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:08 crc kubenswrapper[4690]: I0320 13:27:08.797637 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:08 crc kubenswrapper[4690]: I0320 13:27:08.942208 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:27:08 crc kubenswrapper[4690]: I0320 13:27:08.942263 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:27:08 crc kubenswrapper[4690]: I0320 13:27:08.982997 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:27:09 crc kubenswrapper[4690]: I0320 13:27:09.613258 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-snbxt" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="registry-server" probeResult="failure" output=< Mar 20 13:27:09 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Mar 20 13:27:09 crc kubenswrapper[4690]: > Mar 20 13:27:10 crc kubenswrapper[4690]: I0320 13:27:10.537828 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vwvxj"] Mar 20 13:27:10 crc kubenswrapper[4690]: I0320 13:27:10.538274 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vwvxj" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerName="registry-server" containerID="cri-o://c1d6929c49089d5397c8db5e1718d3637c7633a0d298e189ce1f047eea314dea" gracePeriod=2 Mar 20 13:27:10 crc kubenswrapper[4690]: I0320 13:27:10.806945 4690 generic.go:334] "Generic (PLEG): 
container finished" podID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerID="c1d6929c49089d5397c8db5e1718d3637c7633a0d298e189ce1f047eea314dea" exitCode=0 Mar 20 13:27:10 crc kubenswrapper[4690]: I0320 13:27:10.807025 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvxj" event={"ID":"b8184a4a-79e5-491e-8e56-ebf0bea4601f","Type":"ContainerDied","Data":"c1d6929c49089d5397c8db5e1718d3637c7633a0d298e189ce1f047eea314dea"} Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.027829 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.165834 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-catalog-content\") pod \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.165928 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46fr2\" (UniqueName: \"kubernetes.io/projected/b8184a4a-79e5-491e-8e56-ebf0bea4601f-kube-api-access-46fr2\") pod \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.165976 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-utilities\") pod \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\" (UID: \"b8184a4a-79e5-491e-8e56-ebf0bea4601f\") " Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.167070 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-utilities" (OuterVolumeSpecName: "utilities") pod "b8184a4a-79e5-491e-8e56-ebf0bea4601f" (UID: "b8184a4a-79e5-491e-8e56-ebf0bea4601f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.175795 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8184a4a-79e5-491e-8e56-ebf0bea4601f-kube-api-access-46fr2" (OuterVolumeSpecName: "kube-api-access-46fr2") pod "b8184a4a-79e5-491e-8e56-ebf0bea4601f" (UID: "b8184a4a-79e5-491e-8e56-ebf0bea4601f"). InnerVolumeSpecName "kube-api-access-46fr2". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.231597 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8184a4a-79e5-491e-8e56-ebf0bea4601f" (UID: "b8184a4a-79e5-491e-8e56-ebf0bea4601f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.267406 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.267467 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46fr2\" (UniqueName: \"kubernetes.io/projected/b8184a4a-79e5-491e-8e56-ebf0bea4601f-kube-api-access-46fr2\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.267489 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8184a4a-79e5-491e-8e56-ebf0bea4601f-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.816646 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vwvxj" event={"ID":"b8184a4a-79e5-491e-8e56-ebf0bea4601f","Type":"ContainerDied","Data":"1a23811a389da7dc02bd4e79b357d5b931f5443405fb23c1939da945a1c51e96"} Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.816745 4690 scope.go:117] "RemoveContainer" containerID="c1d6929c49089d5397c8db5e1718d3637c7633a0d298e189ce1f047eea314dea" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.816974 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vwvxj" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.845709 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vwvxj"] Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.845783 4690 scope.go:117] "RemoveContainer" containerID="36b79201273353902f7fe5c64f3c468e3973a41e3b96702d299f1beffda05b1c" Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.852515 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vwvxj"] Mar 20 13:27:11 crc kubenswrapper[4690]: I0320 13:27:11.883047 4690 scope.go:117] "RemoveContainer" containerID="77519c387aa0e2acd33b6c0772854e05e13fbd414c8f44de6b75ec3564abb188" Mar 20 13:27:12 crc kubenswrapper[4690]: I0320 13:27:12.424082 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" path="/var/lib/kubelet/pods/b8184a4a-79e5-491e-8e56-ebf0bea4601f/volumes" Mar 20 13:27:16 crc kubenswrapper[4690]: I0320 13:27:16.824688 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:27:16 crc kubenswrapper[4690]: I0320 13:27:16.825351 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:27:16 crc kubenswrapper[4690]: I0320 13:27:16.867590 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:27:16 crc kubenswrapper[4690]: I0320 13:27:16.906460 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:27:18 crc kubenswrapper[4690]: I0320 13:27:18.635779 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:27:18 crc kubenswrapper[4690]: I0320 13:27:18.709263 4690 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:27:18 crc kubenswrapper[4690]: I0320 13:27:18.999739 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:27:19 crc kubenswrapper[4690]: I0320 13:27:19.121483 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rj8zv"] Mar 20 13:27:19 crc kubenswrapper[4690]: I0320 13:27:19.932946 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9f68"] Mar 20 13:27:19 crc kubenswrapper[4690]: I0320 13:27:19.933389 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-k9f68" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerName="registry-server" containerID="cri-o://02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b" gracePeriod=2 Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.446104 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.491574 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-catalog-content\") pod \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.491655 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flznw\" (UniqueName: \"kubernetes.io/projected/b33d4d78-4e84-48e1-9b17-8427e0bd042e-kube-api-access-flznw\") pod \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.491752 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-utilities\") pod \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\" (UID: \"b33d4d78-4e84-48e1-9b17-8427e0bd042e\") " Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.492686 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-utilities" (OuterVolumeSpecName: "utilities") pod "b33d4d78-4e84-48e1-9b17-8427e0bd042e" (UID: "b33d4d78-4e84-48e1-9b17-8427e0bd042e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.497364 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b33d4d78-4e84-48e1-9b17-8427e0bd042e-kube-api-access-flznw" (OuterVolumeSpecName: "kube-api-access-flznw") pod "b33d4d78-4e84-48e1-9b17-8427e0bd042e" (UID: "b33d4d78-4e84-48e1-9b17-8427e0bd042e"). InnerVolumeSpecName "kube-api-access-flznw". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.526214 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b33d4d78-4e84-48e1-9b17-8427e0bd042e" (UID: "b33d4d78-4e84-48e1-9b17-8427e0bd042e"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.593075 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.593107 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b33d4d78-4e84-48e1-9b17-8427e0bd042e-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.593118 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flznw\" (UniqueName: \"kubernetes.io/projected/b33d4d78-4e84-48e1-9b17-8427e0bd042e-kube-api-access-flznw\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.876036 4690 generic.go:334] "Generic (PLEG): container finished" podID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerID="02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b" exitCode=0 Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.876072 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9f68" event={"ID":"b33d4d78-4e84-48e1-9b17-8427e0bd042e","Type":"ContainerDied","Data":"02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b"} Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.876099 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-k9f68" event={"ID":"b33d4d78-4e84-48e1-9b17-8427e0bd042e","Type":"ContainerDied","Data":"fe147e96ea4a9be5f5b36cdaed816d6eba5d076a3ae594c2f9d086c839eb8d60"} Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.876116 4690 scope.go:117] "RemoveContainer" containerID="02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.876156 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-k9f68" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.897101 4690 scope.go:117] "RemoveContainer" containerID="0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.909742 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9f68"] Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.913510 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-k9f68"] Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.919053 4690 scope.go:117] "RemoveContainer" containerID="1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.934377 4690 scope.go:117] "RemoveContainer" containerID="02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b" Mar 20 13:27:20 crc kubenswrapper[4690]: E0320 13:27:20.935764 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b\": container with ID starting with 02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b not found: ID does not exist" containerID="02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.935813 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b"} err="failed to get container status \"02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b\": rpc error: code = NotFound desc = could not find container \"02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b\": container with ID starting with 02c4e621082c410d75f9a2d299fab3b2a8f6e3d593e18f7154f5e64ab345435b not found: ID does not exist" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.935838 4690 scope.go:117] "RemoveContainer" containerID="0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17" Mar 20 13:27:20 crc kubenswrapper[4690]: E0320 13:27:20.936375 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17\": container with ID starting with 0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17 not found: ID does not exist" containerID="0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.936438 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17"} err="failed to get container status \"0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17\": rpc error: code = NotFound desc = could not find container \"0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17\": container with ID starting with 0f302bbfb1a76795ae7339103c48cee3943959dcec43133aba89e88257dc5a17 not found: ID does not exist" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.936464 4690 scope.go:117] "RemoveContainer" containerID="1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9" Mar 20 13:27:20 crc kubenswrapper[4690]: E0320 13:27:20.936819 4690 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9\": container with ID starting with 1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9 not found: ID does not exist" containerID="1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9" Mar 20 13:27:20 crc kubenswrapper[4690]: I0320 13:27:20.936869 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9"} err="failed to get container status \"1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9\": rpc error: code = NotFound desc = could not find container \"1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9\": container with ID starting with 1a8d3ab0210075578694e394f8a7fd3bad34b163c18c9380188b35fedf25d3b9 not found: ID does not exist" Mar 20 13:27:22 crc kubenswrapper[4690]: I0320 13:27:22.423214 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" path="/var/lib/kubelet/pods/b33d4d78-4e84-48e1-9b17-8427e0bd042e/volumes" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.262510 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-664fd65b8c-lljln"] Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.263109 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" podUID="6b503c44-4908-42e5-931c-c44d6337a0ad" containerName="controller-manager" containerID="cri-o://d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40" gracePeriod=30 Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.351278 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r"] Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.351524 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" podUID="5bcf346d-5d59-4b80-97da-fce796c1259b" containerName="route-controller-manager" containerID="cri-o://9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568" gracePeriod=30 Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.864149 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.902178 4690 generic.go:334] "Generic (PLEG): container finished" podID="5bcf346d-5d59-4b80-97da-fce796c1259b" containerID="9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568" exitCode=0 Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.902240 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" event={"ID":"5bcf346d-5d59-4b80-97da-fce796c1259b","Type":"ContainerDied","Data":"9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568"} Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.902265 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" event={"ID":"5bcf346d-5d59-4b80-97da-fce796c1259b","Type":"ContainerDied","Data":"fd07d8c429addf4e11f590440f4c0fa328ce9c1e639b847529005cdbc83e445c"} Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.902281 4690 scope.go:117] "RemoveContainer" containerID="9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.902521 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.902934 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.908824 4690 generic.go:334] "Generic (PLEG): container finished" podID="6b503c44-4908-42e5-931c-c44d6337a0ad" containerID="d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40" exitCode=0 Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.909012 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" event={"ID":"6b503c44-4908-42e5-931c-c44d6337a0ad","Type":"ContainerDied","Data":"d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40"} Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.909119 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" event={"ID":"6b503c44-4908-42e5-931c-c44d6337a0ad","Type":"ContainerDied","Data":"06c821ce66601f268dcd811d039a3e3fc3c398793bda4e7ec65cf6c3bd24af44"} Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.932379 4690 scope.go:117] "RemoveContainer" containerID="9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568" Mar 20 13:27:23 crc kubenswrapper[4690]: E0320 13:27:23.933247 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568\": container with ID starting with 9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568 not found: ID does not exist" containerID="9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933403 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568"} err="failed to get container status 
\"9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568\": rpc error: code = NotFound desc = could not find container \"9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568\": container with ID starting with 9d70c1cd092e42ba68ebe58fdf81a9e49f29785304a66e75da14dc0bef69f568 not found: ID does not exist" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933503 4690 scope.go:117] "RemoveContainer" containerID="d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933649 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-proxy-ca-bundles\") pod \"6b503c44-4908-42e5-931c-c44d6337a0ad\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933689 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-config\") pod \"5bcf346d-5d59-4b80-97da-fce796c1259b\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933741 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7hkx\" (UniqueName: \"kubernetes.io/projected/6b503c44-4908-42e5-931c-c44d6337a0ad-kube-api-access-h7hkx\") pod \"6b503c44-4908-42e5-931c-c44d6337a0ad\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933804 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-client-ca\") pod \"5bcf346d-5d59-4b80-97da-fce796c1259b\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933866 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-client-ca\") pod \"6b503c44-4908-42e5-931c-c44d6337a0ad\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933893 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5rnt6\" (UniqueName: \"kubernetes.io/projected/5bcf346d-5d59-4b80-97da-fce796c1259b-kube-api-access-5rnt6\") pod \"5bcf346d-5d59-4b80-97da-fce796c1259b\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.933956 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-config\") pod \"6b503c44-4908-42e5-931c-c44d6337a0ad\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.934034 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b503c44-4908-42e5-931c-c44d6337a0ad-serving-cert\") pod \"6b503c44-4908-42e5-931c-c44d6337a0ad\" (UID: \"6b503c44-4908-42e5-931c-c44d6337a0ad\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.934108 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/5bcf346d-5d59-4b80-97da-fce796c1259b-serving-cert\") pod \"5bcf346d-5d59-4b80-97da-fce796c1259b\" (UID: \"5bcf346d-5d59-4b80-97da-fce796c1259b\") " Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.935023 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-config" (OuterVolumeSpecName: "config") pod "5bcf346d-5d59-4b80-97da-fce796c1259b" (UID: "5bcf346d-5d59-4b80-97da-fce796c1259b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.935993 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-client-ca" (OuterVolumeSpecName: "client-ca") pod "6b503c44-4908-42e5-931c-c44d6337a0ad" (UID: "6b503c44-4908-42e5-931c-c44d6337a0ad"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.936146 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-client-ca" (OuterVolumeSpecName: "client-ca") pod "5bcf346d-5d59-4b80-97da-fce796c1259b" (UID: "5bcf346d-5d59-4b80-97da-fce796c1259b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.936370 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-config" (OuterVolumeSpecName: "config") pod "6b503c44-4908-42e5-931c-c44d6337a0ad" (UID: "6b503c44-4908-42e5-931c-c44d6337a0ad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.937156 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6b503c44-4908-42e5-931c-c44d6337a0ad" (UID: "6b503c44-4908-42e5-931c-c44d6337a0ad"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.945695 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5bcf346d-5d59-4b80-97da-fce796c1259b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5bcf346d-5d59-4b80-97da-fce796c1259b" (UID: "5bcf346d-5d59-4b80-97da-fce796c1259b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.945748 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5bcf346d-5d59-4b80-97da-fce796c1259b-kube-api-access-5rnt6" (OuterVolumeSpecName: "kube-api-access-5rnt6") pod "5bcf346d-5d59-4b80-97da-fce796c1259b" (UID: "5bcf346d-5d59-4b80-97da-fce796c1259b"). InnerVolumeSpecName "kube-api-access-5rnt6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.945800 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b503c44-4908-42e5-931c-c44d6337a0ad-kube-api-access-h7hkx" (OuterVolumeSpecName: "kube-api-access-h7hkx") pod "6b503c44-4908-42e5-931c-c44d6337a0ad" (UID: "6b503c44-4908-42e5-931c-c44d6337a0ad"). InnerVolumeSpecName "kube-api-access-h7hkx". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.952368 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b503c44-4908-42e5-931c-c44d6337a0ad-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6b503c44-4908-42e5-931c-c44d6337a0ad" (UID: "6b503c44-4908-42e5-931c-c44d6337a0ad"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.979336 4690 scope.go:117] "RemoveContainer" containerID="d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40" Mar 20 13:27:23 crc kubenswrapper[4690]: E0320 13:27:23.979789 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40\": container with ID starting with d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40 not found: ID does not exist" containerID="d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40" Mar 20 13:27:23 crc kubenswrapper[4690]: I0320 13:27:23.979857 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40"} err="failed to get container status \"d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40\": rpc error: code = NotFound desc = could not find container \"d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40\": container with ID starting with d69d94a4234555995fc21ff8bf994c4036e178df87f90c17ad9a211510d3ad40 not found: ID does not exist" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.035349 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5bcf346d-5d59-4b80-97da-fce796c1259b-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.035385 4690 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.035396 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.035405 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7hkx\" (UniqueName: \"kubernetes.io/projected/6b503c44-4908-42e5-931c-c44d6337a0ad-kube-api-access-h7hkx\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.035414 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5bcf346d-5d59-4b80-97da-fce796c1259b-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: 
I0320 13:27:24.035422 4690 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-client-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.035435 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5rnt6\" (UniqueName: \"kubernetes.io/projected/5bcf346d-5d59-4b80-97da-fce796c1259b-kube-api-access-5rnt6\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.035450 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6b503c44-4908-42e5-931c-c44d6337a0ad-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.035460 4690 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6b503c44-4908-42e5-931c-c44d6337a0ad-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.227420 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r"] Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.232681 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7ffb96f869-p7m8r"] Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.375995 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-698498965b-btsfg"] Mar 20 13:27:24 crc kubenswrapper[4690]: E0320 13:27:24.376242 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerName="extract-utilities" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376259 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerName="extract-utilities" Mar 20 13:27:24 crc kubenswrapper[4690]: E0320 13:27:24.376274 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerName="extract-content" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376283 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerName="extract-content" Mar 20 13:27:24 crc kubenswrapper[4690]: E0320 13:27:24.376295 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerName="extract-utilities" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376302 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerName="extract-utilities" Mar 20 13:27:24 crc kubenswrapper[4690]: E0320 13:27:24.376318 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerName="extract-content" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376325 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerName="extract-content" Mar 20 13:27:24 crc kubenswrapper[4690]: E0320 13:27:24.376337 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerName="registry-server" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376344 4690 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerName="registry-server" Mar 20 13:27:24 crc kubenswrapper[4690]: E0320 13:27:24.376354 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerName="registry-server" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376360 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerName="registry-server" Mar 20 13:27:24 crc kubenswrapper[4690]: E0320 13:27:24.376370 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5bcf346d-5d59-4b80-97da-fce796c1259b" containerName="route-controller-manager" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376377 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5bcf346d-5d59-4b80-97da-fce796c1259b" containerName="route-controller-manager" Mar 20 13:27:24 crc kubenswrapper[4690]: E0320 13:27:24.376391 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b503c44-4908-42e5-931c-c44d6337a0ad" containerName="controller-manager" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376397 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b503c44-4908-42e5-931c-c44d6337a0ad" containerName="controller-manager" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376495 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5bcf346d-5d59-4b80-97da-fce796c1259b" containerName="route-controller-manager" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376510 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b503c44-4908-42e5-931c-c44d6337a0ad" containerName="controller-manager" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376520 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8184a4a-79e5-491e-8e56-ebf0bea4601f" containerName="registry-server" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376533 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="b33d4d78-4e84-48e1-9b17-8427e0bd042e" containerName="registry-server" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.376955 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.386029 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-698498965b-btsfg"] Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.430519 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5bcf346d-5d59-4b80-97da-fce796c1259b" path="/var/lib/kubelet/pods/5bcf346d-5d59-4b80-97da-fce796c1259b/volumes" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.441439 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-config\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.441522 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-proxy-ca-bundles\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.441566 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-client-ca\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.441612 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19c6a75f-fe50-441a-a617-471a870e7b54-serving-cert\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.441636 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chwls\" (UniqueName: \"kubernetes.io/projected/19c6a75f-fe50-441a-a617-471a870e7b54-kube-api-access-chwls\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.542876 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-config\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.542932 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-proxy-ca-bundles\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " 
pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.542962 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-client-ca\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.542999 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19c6a75f-fe50-441a-a617-471a870e7b54-serving-cert\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.543021 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chwls\" (UniqueName: \"kubernetes.io/projected/19c6a75f-fe50-441a-a617-471a870e7b54-kube-api-access-chwls\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.544366 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-client-ca\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.544451 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-config\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.544822 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/19c6a75f-fe50-441a-a617-471a870e7b54-proxy-ca-bundles\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.556359 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/19c6a75f-fe50-441a-a617-471a870e7b54-serving-cert\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.565931 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chwls\" (UniqueName: \"kubernetes.io/projected/19c6a75f-fe50-441a-a617-471a870e7b54-kube-api-access-chwls\") pod \"controller-manager-698498965b-btsfg\" (UID: \"19c6a75f-fe50-441a-a617-471a870e7b54\") " pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.696855 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.918134 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-664fd65b8c-lljln" Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.939747 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-664fd65b8c-lljln"] Mar 20 13:27:24 crc kubenswrapper[4690]: I0320 13:27:24.942231 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-664fd65b8c-lljln"] Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.118927 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-698498965b-btsfg"] Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.376587 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm"] Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.377224 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.379628 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.379818 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.380281 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.380532 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.381167 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.381285 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.397575 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm"] Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.453549 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sf5dt\" (UniqueName: \"kubernetes.io/projected/e4012100-f10c-4748-b194-ff123778b732-kube-api-access-sf5dt\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.453636 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e4012100-f10c-4748-b194-ff123778b732-client-ca\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " 
pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.453682 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4012100-f10c-4748-b194-ff123778b732-config\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.453712 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4012100-f10c-4748-b194-ff123778b732-serving-cert\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.555416 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e4012100-f10c-4748-b194-ff123778b732-client-ca\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.556029 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4012100-f10c-4748-b194-ff123778b732-config\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.556062 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4012100-f10c-4748-b194-ff123778b732-serving-cert\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.556117 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sf5dt\" (UniqueName: \"kubernetes.io/projected/e4012100-f10c-4748-b194-ff123778b732-kube-api-access-sf5dt\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.556618 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e4012100-f10c-4748-b194-ff123778b732-client-ca\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.557638 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e4012100-f10c-4748-b194-ff123778b732-config\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " 
pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.570413 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e4012100-f10c-4748-b194-ff123778b732-serving-cert\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.622044 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sf5dt\" (UniqueName: \"kubernetes.io/projected/e4012100-f10c-4748-b194-ff123778b732-kube-api-access-sf5dt\") pod \"route-controller-manager-5799fc654d-krtfm\" (UID: \"e4012100-f10c-4748-b194-ff123778b732\") " pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.695346 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.927814 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-698498965b-btsfg" event={"ID":"19c6a75f-fe50-441a-a617-471a870e7b54","Type":"ContainerStarted","Data":"263b62e579459be2f4767e1af67c5c99bcdda2233db73629ec67f7df56891d94"} Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.927864 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-698498965b-btsfg" event={"ID":"19c6a75f-fe50-441a-a617-471a870e7b54","Type":"ContainerStarted","Data":"652203b6dbd1ce41b93bf42d020c8ca779e836dcd99e4349faa27c84583c087f"} Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.928889 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.941663 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-698498965b-btsfg" Mar 20 13:27:25 crc kubenswrapper[4690]: I0320 13:27:25.946670 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-698498965b-btsfg" podStartSLOduration=2.946654345 podStartE2EDuration="2.946654345s" podCreationTimestamp="2026-03-20 13:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:27:25.944247455 +0000 UTC m=+292.233847398" watchObservedRunningTime="2026-03-20 13:27:25.946654345 +0000 UTC m=+292.236254288" Mar 20 13:27:26 crc kubenswrapper[4690]: I0320 13:27:26.153575 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm"] Mar 20 13:27:26 crc kubenswrapper[4690]: W0320 13:27:26.159139 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode4012100_f10c_4748_b194_ff123778b732.slice/crio-da519f222f0c4e45b35342b5c316b8d72195d4b56ffabb95c448c5d69355520f WatchSource:0}: Error finding container da519f222f0c4e45b35342b5c316b8d72195d4b56ffabb95c448c5d69355520f: Status 404 returned error can't find the container with id 
da519f222f0c4e45b35342b5c316b8d72195d4b56ffabb95c448c5d69355520f Mar 20 13:27:26 crc kubenswrapper[4690]: I0320 13:27:26.424310 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b503c44-4908-42e5-931c-c44d6337a0ad" path="/var/lib/kubelet/pods/6b503c44-4908-42e5-931c-c44d6337a0ad/volumes" Mar 20 13:27:26 crc kubenswrapper[4690]: I0320 13:27:26.941869 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" event={"ID":"e4012100-f10c-4748-b194-ff123778b732","Type":"ContainerStarted","Data":"07b757f553088a04677f712af8ac83f393c175bf25860536f5eaa7a0a74b86ca"} Mar 20 13:27:26 crc kubenswrapper[4690]: I0320 13:27:26.941955 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" event={"ID":"e4012100-f10c-4748-b194-ff123778b732","Type":"ContainerStarted","Data":"da519f222f0c4e45b35342b5c316b8d72195d4b56ffabb95c448c5d69355520f"} Mar 20 13:27:26 crc kubenswrapper[4690]: I0320 13:27:26.963660 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" podStartSLOduration=3.9636417870000002 podStartE2EDuration="3.963641787s" podCreationTimestamp="2026-03-20 13:27:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:27:26.963305326 +0000 UTC m=+293.252905269" watchObservedRunningTime="2026-03-20 13:27:26.963641787 +0000 UTC m=+293.253241730" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.360891 4690 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.361635 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.361997 4690 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.362683 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710" gracePeriod=15 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.362746 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1" gracePeriod=15 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.362768 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad" gracePeriod=15 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.362790 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376" gracePeriod=15 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.362913 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0" gracePeriod=15 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363437 4690 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363797 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363817 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363830 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363838 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363876 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363884 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363895 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363903 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363914 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363921 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363934 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363941 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363952 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363959 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363967 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363975 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.363986 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.363993 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.364004 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364012 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364124 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364138 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364148 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-regeneration-controller" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364157 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364168 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364179 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364189 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364199 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.364389 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.380233 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.380298 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.380321 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.380345 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.380384 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.400469 4690 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481603 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481673 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481697 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481722 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481739 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481761 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481781 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481816 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481908 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481958 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.481984 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.482005 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.482027 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.583580 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.583661 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.583705 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.583788 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.583835 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.583880 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.697961 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:27:27 crc kubenswrapper[4690]: W0320 13:27:27.718284 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-0e5cad8e590d70606f5f2574d68deb7695faf7df9d0e230ccba7e79536ae9d74 WatchSource:0}: Error finding container 0e5cad8e590d70606f5f2574d68deb7695faf7df9d0e230ccba7e79536ae9d74: Status 404 returned error can't find the container with id 0e5cad8e590d70606f5f2574d68deb7695faf7df9d0e230ccba7e79536ae9d74 Mar 20 13:27:27 crc kubenswrapper[4690]: E0320 13:27:27.721710 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.204:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.189e8fa65133883e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:27:27.720228926 +0000 UTC m=+294.009828859,LastTimestamp:2026-03-20 13:27:27.720228926 +0000 UTC m=+294.009828859,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.947982 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/3.log" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.950331 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.951294 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710" exitCode=0 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.951395 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad" exitCode=0 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.951468 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0" exitCode=0 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.951530 4690 
generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1" exitCode=2 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.951396 4690 scope.go:117] "RemoveContainer" containerID="b846951093daaabf272d8fce5eaaf4145788766e905c636bf3de8a9ae1d536a9" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.953360 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"0e5cad8e590d70606f5f2574d68deb7695faf7df9d0e230ccba7e79536ae9d74"} Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.955302 4690 generic.go:334] "Generic (PLEG): container finished" podID="5e356e5c-eaed-4153-bf94-c373d10612ac" containerID="945df3ab20791ea32df0b9874d35b29ceb81b448bcb1cfac8119164b6ac3b3d2" exitCode=0 Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.955423 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5e356e5c-eaed-4153-bf94-c373d10612ac","Type":"ContainerDied","Data":"945df3ab20791ea32df0b9874d35b29ceb81b448bcb1cfac8119164b6ac3b3d2"} Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.956101 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.956932 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.957443 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.958958 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.960760 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.961348 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.961634 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.962305 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:27 crc kubenswrapper[4690]: I0320 13:27:27.962622 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: E0320 13:27:28.638400 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: E0320 13:27:28.639318 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: E0320 13:27:28.639894 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: E0320 13:27:28.640369 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: E0320 13:27:28.640752 4690 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: I0320 13:27:28.640790 4690 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Mar 20 13:27:28 crc kubenswrapper[4690]: E0320 13:27:28.641135 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="200ms" Mar 20 13:27:28 crc kubenswrapper[4690]: E0320 13:27:28.842068 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="400ms" Mar 20 13:27:28 crc kubenswrapper[4690]: I0320 13:27:28.963816 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" 
event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd"} Mar 20 13:27:28 crc kubenswrapper[4690]: I0320 13:27:28.966651 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: I0320 13:27:28.967297 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: I0320 13:27:28.967991 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:28 crc kubenswrapper[4690]: I0320 13:27:28.971202 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Mar 20 13:27:29 crc kubenswrapper[4690]: E0320 13:27:29.243894 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="800ms" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.397535 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.398253 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.398504 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.398820 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.506832 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e356e5c-eaed-4153-bf94-c373d10612ac-kube-api-access\") pod \"5e356e5c-eaed-4153-bf94-c373d10612ac\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.507276 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-kubelet-dir\") pod \"5e356e5c-eaed-4153-bf94-c373d10612ac\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.507371 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-var-lock\") pod \"5e356e5c-eaed-4153-bf94-c373d10612ac\" (UID: \"5e356e5c-eaed-4153-bf94-c373d10612ac\") " Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.508105 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-var-lock" (OuterVolumeSpecName: "var-lock") pod "5e356e5c-eaed-4153-bf94-c373d10612ac" (UID: "5e356e5c-eaed-4153-bf94-c373d10612ac"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.509115 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "5e356e5c-eaed-4153-bf94-c373d10612ac" (UID: "5e356e5c-eaed-4153-bf94-c373d10612ac"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.520136 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e356e5c-eaed-4153-bf94-c373d10612ac-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "5e356e5c-eaed-4153-bf94-c373d10612ac" (UID: "5e356e5c-eaed-4153-bf94-c373d10612ac"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.609215 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5e356e5c-eaed-4153-bf94-c373d10612ac-kube-api-access\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.609273 4690 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-kubelet-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.609288 4690 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/5e356e5c-eaed-4153-bf94-c373d10612ac-var-lock\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.719987 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.720913 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.721485 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.721911 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.722153 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.722434 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.811933 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.811981 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.812000 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.812069 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.812100 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.812206 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.812261 4690 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.812272 4690 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.914248 4690 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.982154 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.982818 4690 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376" exitCode=0 Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.982888 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.982946 4690 scope.go:117] "RemoveContainer" containerID="3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.986489 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"5e356e5c-eaed-4153-bf94-c373d10612ac","Type":"ContainerDied","Data":"28aeb6ed72eba0e1ad86decca9250cbfecddd78cc1c76e42576a276bf4572835"} Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.986534 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28aeb6ed72eba0e1ad86decca9250cbfecddd78cc1c76e42576a276bf4572835" Mar 20 13:27:29 crc kubenswrapper[4690]: I0320 13:27:29.986610 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.006877 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.007308 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.007545 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.008330 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.008869 4690 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.009238 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.009518 4690 status_manager.go:851] "Failed to get status for pod" 
podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.009756 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.012219 4690 scope.go:117] "RemoveContainer" containerID="92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.024306 4690 scope.go:117] "RemoveContainer" containerID="825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.036583 4690 scope.go:117] "RemoveContainer" containerID="2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1" Mar 20 13:27:30 crc kubenswrapper[4690]: E0320 13:27:30.045611 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="1.6s" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.054006 4690 scope.go:117] "RemoveContainer" containerID="2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.069592 4690 scope.go:117] "RemoveContainer" containerID="a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.086133 4690 scope.go:117] "RemoveContainer" containerID="3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710" Mar 20 13:27:30 crc kubenswrapper[4690]: E0320 13:27:30.090489 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\": container with ID starting with 3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710 not found: ID does not exist" containerID="3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.090536 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710"} err="failed to get container status \"3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\": rpc error: code = NotFound desc = could not find container \"3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710\": container with ID starting with 3696ed45c6c49b532dedd048752cba5797859ec56ef8191f2592d7f95ad96710 not found: ID does not exist" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.090560 4690 scope.go:117] "RemoveContainer" containerID="92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad" Mar 20 13:27:30 crc kubenswrapper[4690]: E0320 13:27:30.092132 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\": container with ID starting with 92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad not found: ID does not exist" containerID="92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.092160 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad"} err="failed to get container status \"92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\": rpc error: code = NotFound desc = could not find container \"92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad\": container with ID starting with 92c1ea739c49f6f865222420ab6c3b5d72de5dbc14da3a75c11d786c9832e2ad not found: ID does not exist" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.092179 4690 scope.go:117] "RemoveContainer" containerID="825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0" Mar 20 13:27:30 crc kubenswrapper[4690]: E0320 13:27:30.093031 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\": container with ID starting with 825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0 not found: ID does not exist" containerID="825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.093057 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0"} err="failed to get container status \"825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\": rpc error: code = NotFound desc = could not find container \"825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0\": container with ID starting with 825a8efbc30b59b18b56b8fd357c6380be640fc8a46b31a82a874984e5b6b8a0 not found: ID does not exist" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.093076 4690 scope.go:117] "RemoveContainer" containerID="2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1" Mar 20 13:27:30 crc kubenswrapper[4690]: E0320 13:27:30.093314 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\": container with ID starting with 2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1 not found: ID does not exist" containerID="2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.093345 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1"} err="failed to get container status \"2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\": rpc error: code = NotFound desc = could not find container \"2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1\": container with ID starting with 2f9d2dcc1feb8a24fdfc007b6c12d81b533d506224feb39851d6b769b8e58de1 not found: ID does not exist" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.093363 4690 scope.go:117] "RemoveContainer" containerID="2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376" Mar 20 13:27:30 crc 
kubenswrapper[4690]: E0320 13:27:30.094082 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\": container with ID starting with 2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376 not found: ID does not exist" containerID="2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.094104 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376"} err="failed to get container status \"2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\": rpc error: code = NotFound desc = could not find container \"2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376\": container with ID starting with 2e8f9023acb81edb3944da6f3cfc95e17d880ff1c1c10d7d685eca919c526376 not found: ID does not exist" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.094118 4690 scope.go:117] "RemoveContainer" containerID="a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516" Mar 20 13:27:30 crc kubenswrapper[4690]: E0320 13:27:30.094381 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\": container with ID starting with a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516 not found: ID does not exist" containerID="a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.094408 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516"} err="failed to get container status \"a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\": rpc error: code = NotFound desc = could not find container \"a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516\": container with ID starting with a223205b440f4b4f42b41d335c36b5bbe6a59b9b7c38e1e3b425bc18f13e6516 not found: ID does not exist" Mar 20 13:27:30 crc kubenswrapper[4690]: I0320 13:27:30.426689 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Mar 20 13:27:31 crc kubenswrapper[4690]: E0320 13:27:31.647314 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="3.2s" Mar 20 13:27:32 crc kubenswrapper[4690]: E0320 13:27:32.431965 4690 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.204:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" volumeName="registry-storage" Mar 20 13:27:33 crc kubenswrapper[4690]: I0320 13:27:33.830227 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:27:33 crc kubenswrapper[4690]: I0320 13:27:33.830362 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:27:33 crc kubenswrapper[4690]: I0320 13:27:33.830502 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:27:33 crc kubenswrapper[4690]: I0320 13:27:33.831901 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d"} pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 13:27:33 crc kubenswrapper[4690]: I0320 13:27:33.832061 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" containerID="cri-o://f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d" gracePeriod=600 Mar 20 13:27:34 crc kubenswrapper[4690]: I0320 13:27:34.023159 4690 generic.go:334] "Generic (PLEG): container finished" podID="60ded650-b298-4115-8286-8969b94d4062" containerID="f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d" exitCode=0 Mar 20 13:27:34 crc kubenswrapper[4690]: I0320 13:27:34.023588 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerDied","Data":"f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d"} Mar 20 13:27:34 crc kubenswrapper[4690]: I0320 13:27:34.417311 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:34 crc kubenswrapper[4690]: I0320 13:27:34.418040 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:34 crc kubenswrapper[4690]: I0320 13:27:34.418352 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:34 crc kubenswrapper[4690]: E0320 13:27:34.849317 4690 controller.go:145] 
"Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="6.4s" Mar 20 13:27:35 crc kubenswrapper[4690]: I0320 13:27:35.033189 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"401f09af9f70f34d1e8ddfc73dfb7eb16961c2dcf3c7b9645411a300c57fc579"} Mar 20 13:27:35 crc kubenswrapper[4690]: I0320 13:27:35.034369 4690 status_manager.go:851] "Failed to get status for pod" podUID="60ded650-b298-4115-8286-8969b94d4062" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-ftcqx\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:35 crc kubenswrapper[4690]: I0320 13:27:35.035257 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:35 crc kubenswrapper[4690]: I0320 13:27:35.035495 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:35 crc kubenswrapper[4690]: I0320 13:27:35.035779 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:35 crc kubenswrapper[4690]: E0320 13:27:35.076244 4690 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.204:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.189e8fa65133883e openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-03-20 13:27:27.720228926 +0000 UTC m=+294.009828859,LastTimestamp:2026-03-20 13:27:27.720228926 +0000 UTC m=+294.009828859,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.128306 4690 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.129385 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.129444 4690 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de" exitCode=1 Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.129474 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de"} Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.129958 4690 scope.go:117] "RemoveContainer" containerID="84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de" Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.130252 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.130475 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.130717 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.130943 4690 status_manager.go:851] "Failed to get status for pod" podUID="60ded650-b298-4115-8286-8969b94d4062" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-ftcqx\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:40 crc kubenswrapper[4690]: I0320 13:27:40.131150 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.141066 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" 
Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.142646 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.142764 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"94af701aaf02b93b607b2656bf0b2ab233c1b7b8f9dab06bdfd555305c4b1c49"} Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.143991 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.144578 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.145285 4690 status_manager.go:851] "Failed to get status for pod" podUID="60ded650-b298-4115-8286-8969b94d4062" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-ftcqx\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.146612 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.147176 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: E0320 13:27:41.250461 4690 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.204:6443: connect: connection refused" interval="7s" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.414507 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.415793 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.418566 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.419762 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.420385 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.420831 4690 status_manager.go:851] "Failed to get status for pod" podUID="60ded650-b298-4115-8286-8969b94d4062" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-ftcqx\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.436758 4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.436813 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:41 crc kubenswrapper[4690]: E0320 13:27:41.437397 4690 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:41 crc kubenswrapper[4690]: I0320 13:27:41.438394 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.151367 4690 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="0f67e5779bdee8755703aeabbe776bc5c1fd5461bd32ccf36a790805e6d26398" exitCode=0 Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.151509 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"0f67e5779bdee8755703aeabbe776bc5c1fd5461bd32ccf36a790805e6d26398"} Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.153143 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"553f4f4996bb12c4a97c285990050ee6511f6fe76c9b677c26fd6d762193ae31"} Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.153701 4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.153736 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:42 crc kubenswrapper[4690]: E0320 13:27:42.154370 4690 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.154397 4690 status_manager.go:851] "Failed to get status for pod" podUID="60ded650-b298-4115-8286-8969b94d4062" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-ftcqx\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.155258 4690 status_manager.go:851] "Failed to get status for pod" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.155736 4690 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.156235 4690 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:42 crc kubenswrapper[4690]: I0320 13:27:42.156686 4690 status_manager.go:851] "Failed to get status for pod" podUID="e4012100-f10c-4748-b194-ff123778b732" 
pod="openshift-route-controller-manager/route-controller-manager-5799fc654d-krtfm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5799fc654d-krtfm\": dial tcp 38.102.83.204:6443: connect: connection refused" Mar 20 13:27:43 crc kubenswrapper[4690]: I0320 13:27:43.168692 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:27:43 crc kubenswrapper[4690]: I0320 13:27:43.168785 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Mar 20 13:27:43 crc kubenswrapper[4690]: I0320 13:27:43.168832 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Mar 20 13:27:43 crc kubenswrapper[4690]: I0320 13:27:43.178085 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"e67ef5d75e53614ac6e9e45db72e0b4635e213b4180b3b8b927d05a6969b312e"} Mar 20 13:27:43 crc kubenswrapper[4690]: I0320 13:27:43.178133 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"f271549db68b0855ddf4bd9d0d416a67e8247a399808789c22f562413a597922"} Mar 20 13:27:43 crc kubenswrapper[4690]: I0320 13:27:43.178149 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"624cc21386c785480c9052e550302e5f1c3745872a1b8f6d4d4474825913a4cc"} Mar 20 13:27:43 crc kubenswrapper[4690]: I0320 13:27:43.178161 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c254fc9fb58c8eb5243ac92b4f0218a594c7e26b6f63a505286ff9ecb8fa8d45"} Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.156576 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" podUID="4694c58d-e630-4eff-a677-d13aca00fcab" containerName="oauth-openshift" containerID="cri-o://0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd" gracePeriod=15 Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.186085 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"585b4f9ea1d8142d31d58f1a7e914582d9aa36c74bc59cd6a24f5ae2c975a30a"} Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.186622 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.186354 4690 kubelet.go:1909] "Trying to delete pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.186811 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.709461 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.713673 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-cliconfig\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.713725 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-provider-selection\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.713780 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-idp-0-file-data\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.713814 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-router-certs\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.713878 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-trusted-ca-bundle\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.713920 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2sz4\" (UniqueName: \"kubernetes.io/projected/4694c58d-e630-4eff-a677-d13aca00fcab-kube-api-access-q2sz4\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.713954 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-serving-cert\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.714009 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-service-ca\") pod 
\"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.714046 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-error\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.714081 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4694c58d-e630-4eff-a677-d13aca00fcab-audit-dir\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.714114 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-login\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.714152 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-audit-policies\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.714191 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-session\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.714228 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-ocp-branding-template\") pod \"4694c58d-e630-4eff-a677-d13aca00fcab\" (UID: \"4694c58d-e630-4eff-a677-d13aca00fcab\") " Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.714378 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.715112 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4694c58d-e630-4eff-a677-d13aca00fcab-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.715373 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.715576 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.715687 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.716315 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.720388 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.733049 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.733125 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4694c58d-e630-4eff-a677-d13aca00fcab-kube-api-access-q2sz4" (OuterVolumeSpecName: "kube-api-access-q2sz4") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "kube-api-access-q2sz4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.733280 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.733444 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.733921 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.734167 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.734662 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.734693 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "4694c58d-e630-4eff-a677-d13aca00fcab" (UID: "4694c58d-e630-4eff-a677-d13aca00fcab"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817093 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817130 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817143 4690 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4694c58d-e630-4eff-a677-d13aca00fcab-audit-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817178 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817189 4690 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-audit-policies\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817198 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817207 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817218 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817230 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817243 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817254 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817267 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2sz4\" (UniqueName: 
\"kubernetes.io/projected/4694c58d-e630-4eff-a677-d13aca00fcab-kube-api-access-q2sz4\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:44 crc kubenswrapper[4690]: I0320 13:27:44.817279 4690 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/4694c58d-e630-4eff-a677-d13aca00fcab-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:27:45 crc kubenswrapper[4690]: I0320 13:27:45.196085 4690 generic.go:334] "Generic (PLEG): container finished" podID="4694c58d-e630-4eff-a677-d13aca00fcab" containerID="0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd" exitCode=0 Mar 20 13:27:45 crc kubenswrapper[4690]: I0320 13:27:45.196143 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" Mar 20 13:27:45 crc kubenswrapper[4690]: I0320 13:27:45.196144 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" event={"ID":"4694c58d-e630-4eff-a677-d13aca00fcab","Type":"ContainerDied","Data":"0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd"} Mar 20 13:27:45 crc kubenswrapper[4690]: I0320 13:27:45.196214 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-rj8zv" event={"ID":"4694c58d-e630-4eff-a677-d13aca00fcab","Type":"ContainerDied","Data":"6b7b4985fbc7aea0912961a61fada869e8914a506a341a4c4d0d24ef5627924c"} Mar 20 13:27:45 crc kubenswrapper[4690]: I0320 13:27:45.196244 4690 scope.go:117] "RemoveContainer" containerID="0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd" Mar 20 13:27:45 crc kubenswrapper[4690]: I0320 13:27:45.220378 4690 scope.go:117] "RemoveContainer" containerID="0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd" Mar 20 13:27:45 crc kubenswrapper[4690]: E0320 13:27:45.220912 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd\": container with ID starting with 0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd not found: ID does not exist" containerID="0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd" Mar 20 13:27:45 crc kubenswrapper[4690]: I0320 13:27:45.220996 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd"} err="failed to get container status \"0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd\": rpc error: code = NotFound desc = could not find container \"0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd\": container with ID starting with 0c6c650b7fb4efc5b191267bfc1513d222b0a25db078d7b930a1477271f566fd not found: ID does not exist" Mar 20 13:27:45 crc kubenswrapper[4690]: I0320 13:27:45.386288 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:27:46 crc kubenswrapper[4690]: I0320 13:27:46.438709 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:46 crc kubenswrapper[4690]: I0320 13:27:46.438774 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:46 crc 
kubenswrapper[4690]: I0320 13:27:46.448250 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:49 crc kubenswrapper[4690]: I0320 13:27:49.198450 4690 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:49 crc kubenswrapper[4690]: I0320 13:27:49.231621 4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:49 crc kubenswrapper[4690]: I0320 13:27:49.231655 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:49 crc kubenswrapper[4690]: I0320 13:27:49.235353 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:27:49 crc kubenswrapper[4690]: I0320 13:27:49.322559 4690 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8e08e246-88a3-475c-a409-fe9edf29e35c" Mar 20 13:27:50 crc kubenswrapper[4690]: I0320 13:27:50.239706 4690 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:50 crc kubenswrapper[4690]: I0320 13:27:50.240173 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="105ebbdb-a98c-4fca-bf5f-667f0090e9c2" Mar 20 13:27:50 crc kubenswrapper[4690]: I0320 13:27:50.244362 4690 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8e08e246-88a3-475c-a409-fe9edf29e35c" Mar 20 13:27:53 crc kubenswrapper[4690]: I0320 13:27:53.169761 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Mar 20 13:27:53 crc kubenswrapper[4690]: I0320 13:27:53.170533 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Mar 20 13:27:58 crc kubenswrapper[4690]: I0320 13:27:58.725320 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Mar 20 13:27:58 crc kubenswrapper[4690]: I0320 13:27:58.888085 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Mar 20 13:27:58 crc kubenswrapper[4690]: I0320 13:27:58.939032 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Mar 20 13:27:59 crc kubenswrapper[4690]: I0320 13:27:59.215076 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Mar 20 13:27:59 crc kubenswrapper[4690]: I0320 13:27:59.223793 4690 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Mar 20 13:27:59 crc kubenswrapper[4690]: I0320 13:27:59.315807 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Mar 20 13:27:59 crc kubenswrapper[4690]: I0320 13:27:59.982233 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Mar 20 13:28:00 crc kubenswrapper[4690]: I0320 13:28:00.291511 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Mar 20 13:28:00 crc kubenswrapper[4690]: I0320 13:28:00.293978 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Mar 20 13:28:00 crc kubenswrapper[4690]: I0320 13:28:00.350160 4690 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Mar 20 13:28:00 crc kubenswrapper[4690]: I0320 13:28:00.603285 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Mar 20 13:28:00 crc kubenswrapper[4690]: I0320 13:28:00.608718 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Mar 20 13:28:00 crc kubenswrapper[4690]: I0320 13:28:00.786707 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Mar 20 13:28:00 crc kubenswrapper[4690]: I0320 13:28:00.927360 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Mar 20 13:28:00 crc kubenswrapper[4690]: I0320 13:28:00.963826 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Mar 20 13:28:01 crc kubenswrapper[4690]: I0320 13:28:01.312245 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Mar 20 13:28:01 crc kubenswrapper[4690]: I0320 13:28:01.331785 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Mar 20 13:28:01 crc kubenswrapper[4690]: I0320 13:28:01.431331 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Mar 20 13:28:01 crc kubenswrapper[4690]: I0320 13:28:01.807535 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Mar 20 13:28:01 crc kubenswrapper[4690]: I0320 13:28:01.863815 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Mar 20 13:28:01 crc kubenswrapper[4690]: I0320 13:28:01.877672 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Mar 20 13:28:01 crc kubenswrapper[4690]: I0320 13:28:01.981165 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.061900 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.089753 4690 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.127772 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.138331 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.200662 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.283669 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.369305 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.377084 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.530194 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.571101 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.600232 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.659801 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.719527 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.905714 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.917698 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.932330 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Mar 20 13:28:02 crc kubenswrapper[4690]: I0320 13:28:02.933899 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.026611 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.111481 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.169828 4690 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure 
output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.169954 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.170022 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.170038 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.171044 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"94af701aaf02b93b607b2656bf0b2ab233c1b7b8f9dab06bdfd555305c4b1c49"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.171248 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://94af701aaf02b93b607b2656bf0b2ab233c1b7b8f9dab06bdfd555305c4b1c49" gracePeriod=30 Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.282196 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.336374 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.378406 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.526587 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.598107 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.658396 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.789389 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.900102 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.918017 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Mar 20 13:28:03 crc kubenswrapper[4690]: I0320 13:28:03.971272 4690 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.028446 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.060713 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.175244 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.213939 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.229452 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.346688 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.513397 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.531640 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.536644 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.617441 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.838378 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.881695 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.904824 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Mar 20 13:28:04 crc kubenswrapper[4690]: I0320 13:28:04.996377 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.012149 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.013486 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.023620 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.040498 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.065273 4690 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.100288 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.109326 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.158993 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.309227 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.328489 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.406536 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.582213 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.582245 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.631969 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.682331 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.709483 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.719976 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.727138 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.772790 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.872164 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.875083 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.918369 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.947866 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Mar 20 13:28:05 crc kubenswrapper[4690]: I0320 13:28:05.986036 4690 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.076097 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.102456 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.171818 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.304648 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.350185 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.376203 4690 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.509469 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.526463 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.526952 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.598998 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.627478 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Mar 20 13:28:06 crc kubenswrapper[4690]: I0320 13:28:06.644283 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.042430 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.161708 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.367732 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.427632 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.428006 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.468493 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.610353 4690 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.621357 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.688159 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.739255 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.786283 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.800872 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.826377 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.937676 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.939147 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.950250 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.964447 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Mar 20 13:28:07 crc kubenswrapper[4690]: I0320 13:28:07.998678 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.008070 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.201429 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.245652 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.247121 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.267996 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.275779 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.304665 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.349902 4690 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.425277 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.447285 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.492535 4690 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.493278 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=41.49325988 podStartE2EDuration="41.49325988s" podCreationTimestamp="2026-03-20 13:27:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:27:49.228948863 +0000 UTC m=+315.518548826" watchObservedRunningTime="2026-03-20 13:28:08.49325988 +0000 UTC m=+334.782859833" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.497994 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-rj8zv","openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.498072 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.503462 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.523602 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=19.52358475 podStartE2EDuration="19.52358475s" podCreationTimestamp="2026-03-20 13:27:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:28:08.521001462 +0000 UTC m=+334.810601415" watchObservedRunningTime="2026-03-20 13:28:08.52358475 +0000 UTC m=+334.813184693" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.601222 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.606340 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.685660 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.720465 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.730301 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.792508 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Mar 
20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.832169 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Mar 20 13:28:08 crc kubenswrapper[4690]: I0320 13:28:08.904059 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.028326 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.097332 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.115311 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.191017 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.231289 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.465894 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.466537 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.604690 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.642950 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.672570 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.687816 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.698611 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.707210 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.733098 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.749831 4690 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.811335 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.874302 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.890682 4690 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.912472 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.929583 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.942561 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.971386 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.972098 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Mar 20 13:28:09 crc kubenswrapper[4690]: I0320 13:28:09.988105 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.105017 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.123657 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.142634 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6b98686f74-shbz7"] Mar 20 13:28:10 crc kubenswrapper[4690]: E0320 13:28:10.142875 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4694c58d-e630-4eff-a677-d13aca00fcab" containerName="oauth-openshift" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.142888 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="4694c58d-e630-4eff-a677-d13aca00fcab" containerName="oauth-openshift" Mar 20 13:28:10 crc kubenswrapper[4690]: E0320 13:28:10.142913 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" containerName="installer" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.142919 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" containerName="installer" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.143007 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="4694c58d-e630-4eff-a677-d13aca00fcab" containerName="oauth-openshift" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.143027 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e356e5c-eaed-4153-bf94-c373d10612ac" containerName="installer" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.143383 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.147807 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.147989 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.148623 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.149197 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.149250 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.149207 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.149635 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.149763 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.149907 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.151159 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.151536 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.153687 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.154783 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.157389 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6b98686f74-shbz7"] Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.166565 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.169590 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.175041 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.181555 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249080 4690 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-service-ca\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249124 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-login\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249160 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249187 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249218 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-error\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249254 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whv8w\" (UniqueName: \"kubernetes.io/projected/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-kube-api-access-whv8w\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249320 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249426 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-audit-dir\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " 
pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249446 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249467 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-audit-policies\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249491 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249505 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-session\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249534 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.249561 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-router-certs\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350490 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350561 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350616 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-error\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350671 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whv8w\" (UniqueName: \"kubernetes.io/projected/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-kube-api-access-whv8w\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350713 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350748 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-audit-dir\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350782 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350931 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-audit-dir\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.350938 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-audit-policies\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.351005 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-trusted-ca-bundle\") pod 
\"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.351032 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-session\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.351065 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.351092 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-router-certs\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.351130 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-service-ca\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.351157 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-login\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.351501 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.352209 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-audit-policies\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.352417 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6b98686f74-shbz7\" 
(UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.353159 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-service-ca\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.357117 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-session\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.357388 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-login\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.357492 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-error\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.357359 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.359557 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.360292 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.360745 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: 
\"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.361799 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-v4-0-config-system-router-certs\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.379649 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whv8w\" (UniqueName: \"kubernetes.io/projected/bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f-kube-api-access-whv8w\") pod \"oauth-openshift-6b98686f74-shbz7\" (UID: \"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f\") " pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.395460 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.423094 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4694c58d-e630-4eff-a677-d13aca00fcab" path="/var/lib/kubelet/pods/4694c58d-e630-4eff-a677-d13aca00fcab/volumes" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.460118 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.541955 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.583724 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.695959 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.738479 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.860127 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.860694 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6b98686f74-shbz7"] Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.873299 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.923500 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.935324 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Mar 20 13:28:10 crc kubenswrapper[4690]: I0320 13:28:10.990468 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.006950 4690 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.014191 4690 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.056539 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.108880 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.146969 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.170670 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.248117 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.300256 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.366186 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.388154 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" event={"ID":"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f","Type":"ContainerStarted","Data":"787723c19c190d4fac37d874923035fd8bfe8e07dacf90f26b6f0c28d23dbfc1"} Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.388191 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" event={"ID":"bcc43ee1-5a5e-4ffc-bda4-06224eb1f10f","Type":"ContainerStarted","Data":"cf527426925e2f4badc719f77664e4ca49c5aa944b23a8cc987ccd9080029d5c"} Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.388494 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.394764 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.415112 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6b98686f74-shbz7" podStartSLOduration=52.415079279 podStartE2EDuration="52.415079279s" podCreationTimestamp="2026-03-20 13:27:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:28:11.411656905 +0000 UTC m=+337.701256938" watchObservedRunningTime="2026-03-20 13:28:11.415079279 +0000 UTC m=+337.704679272" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.425479 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.521495 4690 kubelet.go:2431] 
"SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.521686 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd" gracePeriod=5 Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.617716 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.644113 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.730466 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.757076 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Mar 20 13:28:11 crc kubenswrapper[4690]: I0320 13:28:11.858630 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.065377 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.143495 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.202663 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.222882 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.229915 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.410015 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.437597 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.441934 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.467658 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.515131 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.575547 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.579688 4690 reflector.go:368] Caches populated for *v1.Secret 
from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.675004 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.727514 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.842135 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.859722 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.907543 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Mar 20 13:28:12 crc kubenswrapper[4690]: I0320 13:28:12.920990 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.141245 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.193607 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.224922 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.273952 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.315229 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.331309 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.358263 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.435863 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.590073 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.687288 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.773731 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.819605 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Mar 20 13:28:13 crc kubenswrapper[4690]: 
I0320 13:28:13.825712 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.826208 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.987151 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Mar 20 13:28:13 crc kubenswrapper[4690]: I0320 13:28:13.989507 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.081351 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.153000 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.209279 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.290464 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.502065 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.534998 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.562314 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.726977 4690 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Mar 20 13:28:14 crc kubenswrapper[4690]: I0320 13:28:14.769032 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Mar 20 13:28:16 crc kubenswrapper[4690]: I0320 13:28:16.408992 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.123393 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.123748 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251385 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251480 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251531 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251548 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251610 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251575 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251633 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251733 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.251896 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.252283 4690 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.252331 4690 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.252352 4690 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.252370 4690 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.264091 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.353138 4690 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.427545 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.427600 4690 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd" exitCode=137 Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.427642 4690 scope.go:117] "RemoveContainer" containerID="7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.427729 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.447021 4690 scope.go:117] "RemoveContainer" containerID="7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd" Mar 20 13:28:17 crc kubenswrapper[4690]: E0320 13:28:17.447488 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd\": container with ID starting with 7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd not found: ID does not exist" containerID="7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd" Mar 20 13:28:17 crc kubenswrapper[4690]: I0320 13:28:17.447547 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd"} err="failed to get container status \"7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd\": rpc error: code = NotFound desc = could not find container \"7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd\": container with ID starting with 7bc26191f4b97dbc839398526449ef48624c5143ae422f00f5b98b2a92bf8dfd not found: ID does not exist" Mar 20 13:28:18 crc kubenswrapper[4690]: I0320 13:28:18.426082 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Mar 20 13:28:18 crc kubenswrapper[4690]: I0320 13:28:18.427059 4690 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Mar 20 13:28:18 crc kubenswrapper[4690]: I0320 13:28:18.443958 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Mar 20 13:28:18 crc kubenswrapper[4690]: I0320 13:28:18.444060 4690 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="748c8c90-5eff-4aec-9126-7315244d1b7d" Mar 20 13:28:18 crc kubenswrapper[4690]: I0320 13:28:18.458580 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Mar 20 13:28:18 crc kubenswrapper[4690]: I0320 13:28:18.458648 4690 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="748c8c90-5eff-4aec-9126-7315244d1b7d" Mar 20 13:28:33 crc kubenswrapper[4690]: I0320 13:28:33.558104 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Mar 20 13:28:33 crc kubenswrapper[4690]: I0320 13:28:33.561992 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Mar 20 13:28:33 crc kubenswrapper[4690]: I0320 13:28:33.563015 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Mar 20 13:28:33 crc kubenswrapper[4690]: I0320 13:28:33.563121 4690 generic.go:334] "Generic (PLEG): container finished" 
podID="f614b9022728cf315e60c057852e563e" containerID="94af701aaf02b93b607b2656bf0b2ab233c1b7b8f9dab06bdfd555305c4b1c49" exitCode=137 Mar 20 13:28:33 crc kubenswrapper[4690]: I0320 13:28:33.563181 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"94af701aaf02b93b607b2656bf0b2ab233c1b7b8f9dab06bdfd555305c4b1c49"} Mar 20 13:28:33 crc kubenswrapper[4690]: I0320 13:28:33.563243 4690 scope.go:117] "RemoveContainer" containerID="84bd425fa6832a2fd3fe14146d42e84ee3999d424ee81b39f454e696375f62de" Mar 20 13:28:34 crc kubenswrapper[4690]: I0320 13:28:34.573400 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Mar 20 13:28:34 crc kubenswrapper[4690]: I0320 13:28:34.576634 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/cluster-policy-controller/0.log" Mar 20 13:28:34 crc kubenswrapper[4690]: I0320 13:28:34.576701 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9fccb68137ccf7bd1f8eb2fa4566b4cc46ed0e51bf7a0d42677148f95bd7b1bc"} Mar 20 13:28:35 crc kubenswrapper[4690]: I0320 13:28:35.386611 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:28:38 crc kubenswrapper[4690]: I0320 13:28:38.603310 4690 generic.go:334] "Generic (PLEG): container finished" podID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerID="4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d" exitCode=0 Mar 20 13:28:38 crc kubenswrapper[4690]: I0320 13:28:38.603369 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" event={"ID":"617c74e7-0a16-4376-822f-390d3c44c7c5","Type":"ContainerDied","Data":"4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d"} Mar 20 13:28:38 crc kubenswrapper[4690]: I0320 13:28:38.604071 4690 scope.go:117] "RemoveContainer" containerID="4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d" Mar 20 13:28:39 crc kubenswrapper[4690]: I0320 13:28:39.615366 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" event={"ID":"617c74e7-0a16-4376-822f-390d3c44c7c5","Type":"ContainerStarted","Data":"39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9"} Mar 20 13:28:39 crc kubenswrapper[4690]: I0320 13:28:39.617254 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:28:39 crc kubenswrapper[4690]: I0320 13:28:39.620092 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:28:43 crc kubenswrapper[4690]: I0320 13:28:43.169102 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:28:43 crc kubenswrapper[4690]: I0320 13:28:43.174630 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:28:43 crc kubenswrapper[4690]: I0320 13:28:43.309388 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Mar 20 13:28:43 crc kubenswrapper[4690]: I0320 13:28:43.646025 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.059289 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566888-ddt9g"] Mar 20 13:28:54 crc kubenswrapper[4690]: E0320 13:28:54.060161 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.060178 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.060313 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.060790 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566888-ddt9g" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.062668 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.062889 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.064083 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.071093 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566888-ddt9g"] Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.256294 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vh26c\" (UniqueName: \"kubernetes.io/projected/2d326236-875c-464f-b9e7-97f6bc7a7863-kube-api-access-vh26c\") pod \"auto-csr-approver-29566888-ddt9g\" (UID: \"2d326236-875c-464f-b9e7-97f6bc7a7863\") " pod="openshift-infra/auto-csr-approver-29566888-ddt9g" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.357874 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vh26c\" (UniqueName: \"kubernetes.io/projected/2d326236-875c-464f-b9e7-97f6bc7a7863-kube-api-access-vh26c\") pod \"auto-csr-approver-29566888-ddt9g\" (UID: \"2d326236-875c-464f-b9e7-97f6bc7a7863\") " pod="openshift-infra/auto-csr-approver-29566888-ddt9g" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.376825 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vh26c\" (UniqueName: \"kubernetes.io/projected/2d326236-875c-464f-b9e7-97f6bc7a7863-kube-api-access-vh26c\") pod \"auto-csr-approver-29566888-ddt9g\" (UID: \"2d326236-875c-464f-b9e7-97f6bc7a7863\") " pod="openshift-infra/auto-csr-approver-29566888-ddt9g" Mar 20 13:28:54 crc kubenswrapper[4690]: I0320 13:28:54.675596 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566888-ddt9g" Mar 20 13:28:55 crc kubenswrapper[4690]: I0320 13:28:55.081943 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566888-ddt9g"] Mar 20 13:28:55 crc kubenswrapper[4690]: I0320 13:28:55.722427 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566888-ddt9g" event={"ID":"2d326236-875c-464f-b9e7-97f6bc7a7863","Type":"ContainerStarted","Data":"15548ddfd662b2c76f93678391f99b89d9c66424a074f1b831c96572c655d2ac"} Mar 20 13:28:56 crc kubenswrapper[4690]: I0320 13:28:56.728003 4690 generic.go:334] "Generic (PLEG): container finished" podID="2d326236-875c-464f-b9e7-97f6bc7a7863" containerID="e60ccaf4d35642c92ba8799788c4c0aaf62721a93dcba7b05cbef5ab4339ebfe" exitCode=0 Mar 20 13:28:56 crc kubenswrapper[4690]: I0320 13:28:56.728047 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566888-ddt9g" event={"ID":"2d326236-875c-464f-b9e7-97f6bc7a7863","Type":"ContainerDied","Data":"e60ccaf4d35642c92ba8799788c4c0aaf62721a93dcba7b05cbef5ab4339ebfe"} Mar 20 13:28:58 crc kubenswrapper[4690]: I0320 13:28:58.000087 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566888-ddt9g" Mar 20 13:28:58 crc kubenswrapper[4690]: I0320 13:28:58.106061 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vh26c\" (UniqueName: \"kubernetes.io/projected/2d326236-875c-464f-b9e7-97f6bc7a7863-kube-api-access-vh26c\") pod \"2d326236-875c-464f-b9e7-97f6bc7a7863\" (UID: \"2d326236-875c-464f-b9e7-97f6bc7a7863\") " Mar 20 13:28:58 crc kubenswrapper[4690]: I0320 13:28:58.113555 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d326236-875c-464f-b9e7-97f6bc7a7863-kube-api-access-vh26c" (OuterVolumeSpecName: "kube-api-access-vh26c") pod "2d326236-875c-464f-b9e7-97f6bc7a7863" (UID: "2d326236-875c-464f-b9e7-97f6bc7a7863"). InnerVolumeSpecName "kube-api-access-vh26c". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:28:58 crc kubenswrapper[4690]: I0320 13:28:58.210160 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vh26c\" (UniqueName: \"kubernetes.io/projected/2d326236-875c-464f-b9e7-97f6bc7a7863-kube-api-access-vh26c\") on node \"crc\" DevicePath \"\"" Mar 20 13:28:58 crc kubenswrapper[4690]: I0320 13:28:58.739500 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566888-ddt9g" event={"ID":"2d326236-875c-464f-b9e7-97f6bc7a7863","Type":"ContainerDied","Data":"15548ddfd662b2c76f93678391f99b89d9c66424a074f1b831c96572c655d2ac"} Mar 20 13:28:58 crc kubenswrapper[4690]: I0320 13:28:58.739987 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15548ddfd662b2c76f93678391f99b89d9c66424a074f1b831c96572c655d2ac" Mar 20 13:28:58 crc kubenswrapper[4690]: I0320 13:28:58.739614 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566888-ddt9g" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.350981 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fcrdc"] Mar 20 13:29:31 crc kubenswrapper[4690]: E0320 13:29:31.351882 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d326236-875c-464f-b9e7-97f6bc7a7863" containerName="oc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.351905 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d326236-875c-464f-b9e7-97f6bc7a7863" containerName="oc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.352062 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d326236-875c-464f-b9e7-97f6bc7a7863" containerName="oc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.352627 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.371087 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fcrdc"] Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.507213 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.507412 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-trusted-ca\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.507492 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6qzh\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-kube-api-access-s6qzh\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.507570 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-registry-certificates\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.507634 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-registry-tls\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.507706 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.507773 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.507888 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-bound-sa-token\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.528618 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.608666 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-bound-sa-token\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.608742 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.608801 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-trusted-ca\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.608835 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6qzh\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-kube-api-access-s6qzh\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.608901 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-registry-certificates\") pod 
\"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.608977 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-registry-tls\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.609068 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.610504 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-ca-trust-extracted\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.612649 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-registry-certificates\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.612648 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-trusted-ca\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.616083 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-registry-tls\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.616167 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-installation-pull-secrets\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.625110 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-bound-sa-token\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.630151 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-s6qzh\" (UniqueName: \"kubernetes.io/projected/0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5-kube-api-access-s6qzh\") pod \"image-registry-66df7c8f76-fcrdc\" (UID: \"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5\") " pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.675136 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.885715 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-fcrdc"] Mar 20 13:29:31 crc kubenswrapper[4690]: I0320 13:29:31.920127 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" event={"ID":"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5","Type":"ContainerStarted","Data":"6eacd1fe3f6de6ece3cd4fdf707ca8b58c45fb6209b6780ecceb4818d4ad461a"} Mar 20 13:29:32 crc kubenswrapper[4690]: I0320 13:29:32.928283 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" event={"ID":"0fdb10c6-01f6-4760-a6f2-1d5ab35f9cb5","Type":"ContainerStarted","Data":"d5294bc5a6df8259fb9ab7d0e1409fda69f7a5ed4e13af4e6819d0ae82d5c210"} Mar 20 13:29:32 crc kubenswrapper[4690]: I0320 13:29:32.929052 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:32 crc kubenswrapper[4690]: I0320 13:29:32.950973 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" podStartSLOduration=1.9509509139999999 podStartE2EDuration="1.950950914s" podCreationTimestamp="2026-03-20 13:29:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:29:32.947738532 +0000 UTC m=+419.237338495" watchObservedRunningTime="2026-03-20 13:29:32.950950914 +0000 UTC m=+419.240550857" Mar 20 13:29:51 crc kubenswrapper[4690]: I0320 13:29:51.683744 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-fcrdc" Mar 20 13:29:51 crc kubenswrapper[4690]: I0320 13:29:51.764403 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkvkz"] Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.154980 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566890-24sp6"] Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.156743 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566890-24sp6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.160714 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6"] Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.161783 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.163587 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.163735 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.163809 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.165705 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.166198 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.170736 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566890-24sp6"] Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.178117 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6"] Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.298930 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/344f0f29-cf1e-438d-988a-bc5bf28626d3-config-volume\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.299009 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5qg6\" (UniqueName: \"kubernetes.io/projected/21a4a656-82a9-4524-9b96-59374e4523e8-kube-api-access-h5qg6\") pod \"auto-csr-approver-29566890-24sp6\" (UID: \"21a4a656-82a9-4524-9b96-59374e4523e8\") " pod="openshift-infra/auto-csr-approver-29566890-24sp6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.299057 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/344f0f29-cf1e-438d-988a-bc5bf28626d3-secret-volume\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.299080 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn2jg\" (UniqueName: \"kubernetes.io/projected/344f0f29-cf1e-438d-988a-bc5bf28626d3-kube-api-access-tn2jg\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.400365 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/344f0f29-cf1e-438d-988a-bc5bf28626d3-secret-volume\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.400427 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn2jg\" (UniqueName: \"kubernetes.io/projected/344f0f29-cf1e-438d-988a-bc5bf28626d3-kube-api-access-tn2jg\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.400494 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/344f0f29-cf1e-438d-988a-bc5bf28626d3-config-volume\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.400547 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5qg6\" (UniqueName: \"kubernetes.io/projected/21a4a656-82a9-4524-9b96-59374e4523e8-kube-api-access-h5qg6\") pod \"auto-csr-approver-29566890-24sp6\" (UID: \"21a4a656-82a9-4524-9b96-59374e4523e8\") " pod="openshift-infra/auto-csr-approver-29566890-24sp6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.401363 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/344f0f29-cf1e-438d-988a-bc5bf28626d3-config-volume\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.406713 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/344f0f29-cf1e-438d-988a-bc5bf28626d3-secret-volume\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.416547 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn2jg\" (UniqueName: \"kubernetes.io/projected/344f0f29-cf1e-438d-988a-bc5bf28626d3-kube-api-access-tn2jg\") pod \"collect-profiles-29566890-8phq6\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.426314 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5qg6\" (UniqueName: \"kubernetes.io/projected/21a4a656-82a9-4524-9b96-59374e4523e8-kube-api-access-h5qg6\") pod \"auto-csr-approver-29566890-24sp6\" (UID: \"21a4a656-82a9-4524-9b96-59374e4523e8\") " pod="openshift-infra/auto-csr-approver-29566890-24sp6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.482618 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566890-24sp6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.497058 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.691346 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566890-24sp6"] Mar 20 13:30:00 crc kubenswrapper[4690]: I0320 13:30:00.714372 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6"] Mar 20 13:30:00 crc kubenswrapper[4690]: W0320 13:30:00.719839 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod344f0f29_cf1e_438d_988a_bc5bf28626d3.slice/crio-e80acea55e7bea97ca51f99e9ce71c76007ae8ca5d70c487a7a8700fc5c0d06c WatchSource:0}: Error finding container e80acea55e7bea97ca51f99e9ce71c76007ae8ca5d70c487a7a8700fc5c0d06c: Status 404 returned error can't find the container with id e80acea55e7bea97ca51f99e9ce71c76007ae8ca5d70c487a7a8700fc5c0d06c Mar 20 13:30:01 crc kubenswrapper[4690]: I0320 13:30:01.119637 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566890-24sp6" event={"ID":"21a4a656-82a9-4524-9b96-59374e4523e8","Type":"ContainerStarted","Data":"360747eb594ab85354bd955afccf5a9dad70f4f60ea6e93d10f5513f33ca5705"} Mar 20 13:30:01 crc kubenswrapper[4690]: I0320 13:30:01.121167 4690 generic.go:334] "Generic (PLEG): container finished" podID="344f0f29-cf1e-438d-988a-bc5bf28626d3" containerID="43baed54fd693a103e1ee66ea5ec8d29c7e97db30b46299ad8c0a97db5f90558" exitCode=0 Mar 20 13:30:01 crc kubenswrapper[4690]: I0320 13:30:01.121201 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" event={"ID":"344f0f29-cf1e-438d-988a-bc5bf28626d3","Type":"ContainerDied","Data":"43baed54fd693a103e1ee66ea5ec8d29c7e97db30b46299ad8c0a97db5f90558"} Mar 20 13:30:01 crc kubenswrapper[4690]: I0320 13:30:01.121224 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" event={"ID":"344f0f29-cf1e-438d-988a-bc5bf28626d3","Type":"ContainerStarted","Data":"e80acea55e7bea97ca51f99e9ce71c76007ae8ca5d70c487a7a8700fc5c0d06c"} Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.415666 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.528942 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/344f0f29-cf1e-438d-988a-bc5bf28626d3-config-volume\") pod \"344f0f29-cf1e-438d-988a-bc5bf28626d3\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.529035 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/344f0f29-cf1e-438d-988a-bc5bf28626d3-secret-volume\") pod \"344f0f29-cf1e-438d-988a-bc5bf28626d3\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.529056 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn2jg\" (UniqueName: \"kubernetes.io/projected/344f0f29-cf1e-438d-988a-bc5bf28626d3-kube-api-access-tn2jg\") pod \"344f0f29-cf1e-438d-988a-bc5bf28626d3\" (UID: \"344f0f29-cf1e-438d-988a-bc5bf28626d3\") " Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.530309 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/344f0f29-cf1e-438d-988a-bc5bf28626d3-config-volume" (OuterVolumeSpecName: "config-volume") pod "344f0f29-cf1e-438d-988a-bc5bf28626d3" (UID: "344f0f29-cf1e-438d-988a-bc5bf28626d3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.535029 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/344f0f29-cf1e-438d-988a-bc5bf28626d3-kube-api-access-tn2jg" (OuterVolumeSpecName: "kube-api-access-tn2jg") pod "344f0f29-cf1e-438d-988a-bc5bf28626d3" (UID: "344f0f29-cf1e-438d-988a-bc5bf28626d3"). InnerVolumeSpecName "kube-api-access-tn2jg". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.535407 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/344f0f29-cf1e-438d-988a-bc5bf28626d3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "344f0f29-cf1e-438d-988a-bc5bf28626d3" (UID: "344f0f29-cf1e-438d-988a-bc5bf28626d3"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.630537 4690 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/344f0f29-cf1e-438d-988a-bc5bf28626d3-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.630577 4690 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/344f0f29-cf1e-438d-988a-bc5bf28626d3-secret-volume\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:02 crc kubenswrapper[4690]: I0320 13:30:02.630590 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn2jg\" (UniqueName: \"kubernetes.io/projected/344f0f29-cf1e-438d-988a-bc5bf28626d3-kube-api-access-tn2jg\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:03 crc kubenswrapper[4690]: I0320 13:30:03.133388 4690 generic.go:334] "Generic (PLEG): container finished" podID="21a4a656-82a9-4524-9b96-59374e4523e8" containerID="1e46750da958eb5d82f18e6cfdf3aa39bfb3e63d4ff0d14597b110eb5d81113f" exitCode=0 Mar 20 13:30:03 crc kubenswrapper[4690]: I0320 13:30:03.133590 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566890-24sp6" event={"ID":"21a4a656-82a9-4524-9b96-59374e4523e8","Type":"ContainerDied","Data":"1e46750da958eb5d82f18e6cfdf3aa39bfb3e63d4ff0d14597b110eb5d81113f"} Mar 20 13:30:03 crc kubenswrapper[4690]: I0320 13:30:03.135481 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" event={"ID":"344f0f29-cf1e-438d-988a-bc5bf28626d3","Type":"ContainerDied","Data":"e80acea55e7bea97ca51f99e9ce71c76007ae8ca5d70c487a7a8700fc5c0d06c"} Mar 20 13:30:03 crc kubenswrapper[4690]: I0320 13:30:03.135541 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e80acea55e7bea97ca51f99e9ce71c76007ae8ca5d70c487a7a8700fc5c0d06c" Mar 20 13:30:03 crc kubenswrapper[4690]: I0320 13:30:03.135567 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566890-8phq6" Mar 20 13:30:03 crc kubenswrapper[4690]: I0320 13:30:03.829756 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:30:03 crc kubenswrapper[4690]: I0320 13:30:03.829829 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:30:04 crc kubenswrapper[4690]: I0320 13:30:04.455928 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566890-24sp6" Mar 20 13:30:04 crc kubenswrapper[4690]: I0320 13:30:04.470354 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h5qg6\" (UniqueName: \"kubernetes.io/projected/21a4a656-82a9-4524-9b96-59374e4523e8-kube-api-access-h5qg6\") pod \"21a4a656-82a9-4524-9b96-59374e4523e8\" (UID: \"21a4a656-82a9-4524-9b96-59374e4523e8\") " Mar 20 13:30:04 crc kubenswrapper[4690]: I0320 13:30:04.475262 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21a4a656-82a9-4524-9b96-59374e4523e8-kube-api-access-h5qg6" (OuterVolumeSpecName: "kube-api-access-h5qg6") pod "21a4a656-82a9-4524-9b96-59374e4523e8" (UID: "21a4a656-82a9-4524-9b96-59374e4523e8"). InnerVolumeSpecName "kube-api-access-h5qg6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:04 crc kubenswrapper[4690]: I0320 13:30:04.571442 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h5qg6\" (UniqueName: \"kubernetes.io/projected/21a4a656-82a9-4524-9b96-59374e4523e8-kube-api-access-h5qg6\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:05 crc kubenswrapper[4690]: I0320 13:30:05.150459 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566890-24sp6" event={"ID":"21a4a656-82a9-4524-9b96-59374e4523e8","Type":"ContainerDied","Data":"360747eb594ab85354bd955afccf5a9dad70f4f60ea6e93d10f5513f33ca5705"} Mar 20 13:30:05 crc kubenswrapper[4690]: I0320 13:30:05.150507 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="360747eb594ab85354bd955afccf5a9dad70f4f60ea6e93d10f5513f33ca5705" Mar 20 13:30:05 crc kubenswrapper[4690]: I0320 13:30:05.150524 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566890-24sp6" Mar 20 13:30:16 crc kubenswrapper[4690]: I0320 13:30:16.815578 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" podUID="704eed42-1e9b-4d8c-be9f-4d237658ae86" containerName="registry" containerID="cri-o://acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e" gracePeriod=30 Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.154714 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.227944 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-certificates\") pod \"704eed42-1e9b-4d8c-be9f-4d237658ae86\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.227998 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/704eed42-1e9b-4d8c-be9f-4d237658ae86-installation-pull-secrets\") pod \"704eed42-1e9b-4d8c-be9f-4d237658ae86\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.228088 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/704eed42-1e9b-4d8c-be9f-4d237658ae86-ca-trust-extracted\") pod \"704eed42-1e9b-4d8c-be9f-4d237658ae86\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.228125 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-tls\") pod \"704eed42-1e9b-4d8c-be9f-4d237658ae86\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.228199 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-trusted-ca\") pod \"704eed42-1e9b-4d8c-be9f-4d237658ae86\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.228222 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-bound-sa-token\") pod \"704eed42-1e9b-4d8c-be9f-4d237658ae86\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.228249 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8m7d\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-kube-api-access-r8m7d\") pod \"704eed42-1e9b-4d8c-be9f-4d237658ae86\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.228384 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"704eed42-1e9b-4d8c-be9f-4d237658ae86\" (UID: \"704eed42-1e9b-4d8c-be9f-4d237658ae86\") " Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.229290 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "704eed42-1e9b-4d8c-be9f-4d237658ae86" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.229405 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "704eed42-1e9b-4d8c-be9f-4d237658ae86" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.229768 4690 generic.go:334] "Generic (PLEG): container finished" podID="704eed42-1e9b-4d8c-be9f-4d237658ae86" containerID="acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e" exitCode=0 Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.229812 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" event={"ID":"704eed42-1e9b-4d8c-be9f-4d237658ae86","Type":"ContainerDied","Data":"acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e"} Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.229860 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" event={"ID":"704eed42-1e9b-4d8c-be9f-4d237658ae86","Type":"ContainerDied","Data":"d23cb3c7f2664c0101ee3f9cd6d7f0aae4873e20be5117566c305e99812d4f53"} Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.229881 4690 scope.go:117] "RemoveContainer" containerID="acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.229976 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-pkvkz" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.235087 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "704eed42-1e9b-4d8c-be9f-4d237658ae86" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.235869 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/704eed42-1e9b-4d8c-be9f-4d237658ae86-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "704eed42-1e9b-4d8c-be9f-4d237658ae86" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.238456 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-kube-api-access-r8m7d" (OuterVolumeSpecName: "kube-api-access-r8m7d") pod "704eed42-1e9b-4d8c-be9f-4d237658ae86" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86"). InnerVolumeSpecName "kube-api-access-r8m7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.239107 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "704eed42-1e9b-4d8c-be9f-4d237658ae86" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86"). 
InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.246263 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/704eed42-1e9b-4d8c-be9f-4d237658ae86-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "704eed42-1e9b-4d8c-be9f-4d237658ae86" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.246920 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "704eed42-1e9b-4d8c-be9f-4d237658ae86" (UID: "704eed42-1e9b-4d8c-be9f-4d237658ae86"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.288940 4690 scope.go:117] "RemoveContainer" containerID="acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e" Mar 20 13:30:17 crc kubenswrapper[4690]: E0320 13:30:17.289398 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e\": container with ID starting with acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e not found: ID does not exist" containerID="acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.289436 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e"} err="failed to get container status \"acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e\": rpc error: code = NotFound desc = could not find container \"acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e\": container with ID starting with acc9e7b0eabbb8217805d9997990b279147d502fe42aac4d213343be119b999e not found: ID does not exist" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.330248 4690 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/704eed42-1e9b-4d8c-be9f-4d237658ae86-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.330288 4690 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-tls\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.330301 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.330315 4690 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-bound-sa-token\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.330326 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8m7d\" (UniqueName: 
\"kubernetes.io/projected/704eed42-1e9b-4d8c-be9f-4d237658ae86-kube-api-access-r8m7d\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.330340 4690 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/704eed42-1e9b-4d8c-be9f-4d237658ae86-registry-certificates\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.330350 4690 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/704eed42-1e9b-4d8c-be9f-4d237658ae86-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.584240 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkvkz"] Mar 20 13:30:17 crc kubenswrapper[4690]: I0320 13:30:17.592831 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-pkvkz"] Mar 20 13:30:18 crc kubenswrapper[4690]: I0320 13:30:18.425994 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="704eed42-1e9b-4d8c-be9f-4d237658ae86" path="/var/lib/kubelet/pods/704eed42-1e9b-4d8c-be9f-4d237658ae86/volumes" Mar 20 13:30:33 crc kubenswrapper[4690]: I0320 13:30:33.829506 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:30:33 crc kubenswrapper[4690]: I0320 13:30:33.830080 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.432168 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2485h"] Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.433266 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2485h" podUID="62e83612-6289-48a8-a3bb-4488048279f7" containerName="registry-server" containerID="cri-o://d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040" gracePeriod=30 Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.445104 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4rnwq"] Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.445377 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4rnwq" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerName="registry-server" containerID="cri-o://b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6" gracePeriod=30 Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.461067 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5mwrz"] Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.461360 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" 
podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" containerID="cri-o://39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9" gracePeriod=30 Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.473387 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-snbxt"] Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.473673 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-snbxt" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="registry-server" containerID="cri-o://e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc" gracePeriod=30 Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.480902 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-klwmc"] Mar 20 13:30:50 crc kubenswrapper[4690]: E0320 13:30:50.481128 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21a4a656-82a9-4524-9b96-59374e4523e8" containerName="oc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.481143 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="21a4a656-82a9-4524-9b96-59374e4523e8" containerName="oc" Mar 20 13:30:50 crc kubenswrapper[4690]: E0320 13:30:50.481157 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="704eed42-1e9b-4d8c-be9f-4d237658ae86" containerName="registry" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.481163 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="704eed42-1e9b-4d8c-be9f-4d237658ae86" containerName="registry" Mar 20 13:30:50 crc kubenswrapper[4690]: E0320 13:30:50.481177 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="344f0f29-cf1e-438d-988a-bc5bf28626d3" containerName="collect-profiles" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.481183 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="344f0f29-cf1e-438d-988a-bc5bf28626d3" containerName="collect-profiles" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.481283 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="344f0f29-cf1e-438d-988a-bc5bf28626d3" containerName="collect-profiles" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.481296 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="21a4a656-82a9-4524-9b96-59374e4523e8" containerName="oc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.481307 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="704eed42-1e9b-4d8c-be9f-4d237658ae86" containerName="registry" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.481706 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.487631 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zwzf4"] Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.487990 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zwzf4" podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerName="registry-server" containerID="cri-o://82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a" gracePeriod=30 Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.492051 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-klwmc"] Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.678822 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/479d5dc0-0f18-4083-88d8-e07327096950-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.680933 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/479d5dc0-0f18-4083-88d8-e07327096950-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.681012 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfxgm\" (UniqueName: \"kubernetes.io/projected/479d5dc0-0f18-4083-88d8-e07327096950-kube-api-access-jfxgm\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.782489 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/479d5dc0-0f18-4083-88d8-e07327096950-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.782579 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/479d5dc0-0f18-4083-88d8-e07327096950-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.782606 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfxgm\" (UniqueName: \"kubernetes.io/projected/479d5dc0-0f18-4083-88d8-e07327096950-kube-api-access-jfxgm\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.783919 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/479d5dc0-0f18-4083-88d8-e07327096950-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.790838 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/479d5dc0-0f18-4083-88d8-e07327096950-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.798895 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfxgm\" (UniqueName: \"kubernetes.io/projected/479d5dc0-0f18-4083-88d8-e07327096950-kube-api-access-jfxgm\") pod \"marketplace-operator-79b997595-klwmc\" (UID: \"479d5dc0-0f18-4083-88d8-e07327096950\") " pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.801593 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.949151 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.956530 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.960805 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.970905 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:30:50 crc kubenswrapper[4690]: I0320 13:30:50.980347 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.070589 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-klwmc"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.084752 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-catalog-content\") pod \"62e83612-6289-48a8-a3bb-4488048279f7\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.084823 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-operator-metrics\") pod \"617c74e7-0a16-4376-822f-390d3c44c7c5\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.084905 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bsrgj\" (UniqueName: \"kubernetes.io/projected/617c74e7-0a16-4376-822f-390d3c44c7c5-kube-api-access-bsrgj\") pod \"617c74e7-0a16-4376-822f-390d3c44c7c5\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.084947 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hppjq\" (UniqueName: \"kubernetes.io/projected/62e83612-6289-48a8-a3bb-4488048279f7-kube-api-access-hppjq\") pod \"62e83612-6289-48a8-a3bb-4488048279f7\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.084968 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-utilities\") pod \"62e83612-6289-48a8-a3bb-4488048279f7\" (UID: \"62e83612-6289-48a8-a3bb-4488048279f7\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085021 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-utilities\") pod \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085042 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-catalog-content\") pod \"3978f4ce-bf05-41c1-b941-c5927fec1785\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085063 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-catalog-content\") pod \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085094 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5z69\" (UniqueName: \"kubernetes.io/projected/3978f4ce-bf05-41c1-b941-c5927fec1785-kube-api-access-x5z69\") pod \"3978f4ce-bf05-41c1-b941-c5927fec1785\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 
13:30:51.085196 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w78vg\" (UniqueName: \"kubernetes.io/projected/c91a8b76-7263-4b29-ac22-b1459fe1f35b-kube-api-access-w78vg\") pod \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085215 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-trusted-ca\") pod \"617c74e7-0a16-4376-822f-390d3c44c7c5\" (UID: \"617c74e7-0a16-4376-822f-390d3c44c7c5\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085233 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-catalog-content\") pod \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\" (UID: \"c91a8b76-7263-4b29-ac22-b1459fe1f35b\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085250 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7qh7\" (UniqueName: \"kubernetes.io/projected/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-kube-api-access-l7qh7\") pod \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085275 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-utilities\") pod \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\" (UID: \"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.085303 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-utilities\") pod \"3978f4ce-bf05-41c1-b941-c5927fec1785\" (UID: \"3978f4ce-bf05-41c1-b941-c5927fec1785\") " Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.086139 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-utilities" (OuterVolumeSpecName: "utilities") pod "c91a8b76-7263-4b29-ac22-b1459fe1f35b" (UID: "c91a8b76-7263-4b29-ac22-b1459fe1f35b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.086228 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "617c74e7-0a16-4376-822f-390d3c44c7c5" (UID: "617c74e7-0a16-4376-822f-390d3c44c7c5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.086392 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-utilities" (OuterVolumeSpecName: "utilities") pod "62e83612-6289-48a8-a3bb-4488048279f7" (UID: "62e83612-6289-48a8-a3bb-4488048279f7"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.087331 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-utilities" (OuterVolumeSpecName: "utilities") pod "bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" (UID: "bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.087862 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-utilities" (OuterVolumeSpecName: "utilities") pod "3978f4ce-bf05-41c1-b941-c5927fec1785" (UID: "3978f4ce-bf05-41c1-b941-c5927fec1785"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.089153 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3978f4ce-bf05-41c1-b941-c5927fec1785-kube-api-access-x5z69" (OuterVolumeSpecName: "kube-api-access-x5z69") pod "3978f4ce-bf05-41c1-b941-c5927fec1785" (UID: "3978f4ce-bf05-41c1-b941-c5927fec1785"). InnerVolumeSpecName "kube-api-access-x5z69". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.089172 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-kube-api-access-l7qh7" (OuterVolumeSpecName: "kube-api-access-l7qh7") pod "bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" (UID: "bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6"). InnerVolumeSpecName "kube-api-access-l7qh7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.089281 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "617c74e7-0a16-4376-822f-390d3c44c7c5" (UID: "617c74e7-0a16-4376-822f-390d3c44c7c5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.089371 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62e83612-6289-48a8-a3bb-4488048279f7-kube-api-access-hppjq" (OuterVolumeSpecName: "kube-api-access-hppjq") pod "62e83612-6289-48a8-a3bb-4488048279f7" (UID: "62e83612-6289-48a8-a3bb-4488048279f7"). InnerVolumeSpecName "kube-api-access-hppjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.100033 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c91a8b76-7263-4b29-ac22-b1459fe1f35b-kube-api-access-w78vg" (OuterVolumeSpecName: "kube-api-access-w78vg") pod "c91a8b76-7263-4b29-ac22-b1459fe1f35b" (UID: "c91a8b76-7263-4b29-ac22-b1459fe1f35b"). InnerVolumeSpecName "kube-api-access-w78vg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.107809 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/617c74e7-0a16-4376-822f-390d3c44c7c5-kube-api-access-bsrgj" (OuterVolumeSpecName: "kube-api-access-bsrgj") pod "617c74e7-0a16-4376-822f-390d3c44c7c5" (UID: "617c74e7-0a16-4376-822f-390d3c44c7c5"). InnerVolumeSpecName "kube-api-access-bsrgj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.125954 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c91a8b76-7263-4b29-ac22-b1459fe1f35b" (UID: "c91a8b76-7263-4b29-ac22-b1459fe1f35b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.143336 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" (UID: "bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.149565 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62e83612-6289-48a8-a3bb-4488048279f7" (UID: "62e83612-6289-48a8-a3bb-4488048279f7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.186943 4690 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.186975 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w78vg\" (UniqueName: \"kubernetes.io/projected/c91a8b76-7263-4b29-ac22-b1459fe1f35b-kube-api-access-w78vg\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.186984 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.186993 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7qh7\" (UniqueName: \"kubernetes.io/projected/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-kube-api-access-l7qh7\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187003 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187013 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187021 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187030 4690 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/617c74e7-0a16-4376-822f-390d3c44c7c5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187039 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bsrgj\" (UniqueName: \"kubernetes.io/projected/617c74e7-0a16-4376-822f-390d3c44c7c5-kube-api-access-bsrgj\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187047 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hppjq\" (UniqueName: \"kubernetes.io/projected/62e83612-6289-48a8-a3bb-4488048279f7-kube-api-access-hppjq\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187055 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e83612-6289-48a8-a3bb-4488048279f7-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187062 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c91a8b76-7263-4b29-ac22-b1459fe1f35b-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187070 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.187079 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5z69\" (UniqueName: \"kubernetes.io/projected/3978f4ce-bf05-41c1-b941-c5927fec1785-kube-api-access-x5z69\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.240783 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3978f4ce-bf05-41c1-b941-c5927fec1785" (UID: "3978f4ce-bf05-41c1-b941-c5927fec1785"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.288218 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3978f4ce-bf05-41c1-b941-c5927fec1785-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.464571 4690 generic.go:334] "Generic (PLEG): container finished" podID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerID="b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6" exitCode=0 Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.464634 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rnwq" event={"ID":"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6","Type":"ContainerDied","Data":"b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.464664 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4rnwq" event={"ID":"bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6","Type":"ContainerDied","Data":"a808e10e40f08713ff7e100081d503aa1f94b80ff4b5d0d2714b5918bba76780"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.464684 4690 scope.go:117] "RemoveContainer" containerID="b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.464823 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-4rnwq" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.469186 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" event={"ID":"479d5dc0-0f18-4083-88d8-e07327096950","Type":"ContainerStarted","Data":"5324fa25a7ff3fbf12c9168672e1c706aa6d57966e55e1a81f08610edd2de2b2"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.469254 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" event={"ID":"479d5dc0-0f18-4083-88d8-e07327096950","Type":"ContainerStarted","Data":"dfa6edb944c5b0471671151506330a01a7a956c01ee67df62689391241f65e9f"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.469576 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.475589 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.476944 4690 generic.go:334] "Generic (PLEG): container finished" podID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerID="39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9" exitCode=0 Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.477026 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" event={"ID":"617c74e7-0a16-4376-822f-390d3c44c7c5","Type":"ContainerDied","Data":"39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.477069 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.477080 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-5mwrz" event={"ID":"617c74e7-0a16-4376-822f-390d3c44c7c5","Type":"ContainerDied","Data":"e8648d2cf5042c3afe37038af79793247700fad78d315463827f7c345ee4acc7"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.485713 4690 scope.go:117] "RemoveContainer" containerID="fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.487916 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zwzf4" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.487971 4690 generic.go:334] "Generic (PLEG): container finished" podID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerID="82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a" exitCode=0 Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.488038 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwzf4" event={"ID":"3978f4ce-bf05-41c1-b941-c5927fec1785","Type":"ContainerDied","Data":"82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.488085 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zwzf4" event={"ID":"3978f4ce-bf05-41c1-b941-c5927fec1785","Type":"ContainerDied","Data":"dbd01863dc5009d4a6e9c463300b352f3980ac292a1676b9e153d5e3b44010ae"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.492261 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-klwmc" podStartSLOduration=1.492251557 podStartE2EDuration="1.492251557s" podCreationTimestamp="2026-03-20 13:30:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:30:51.491229721 +0000 UTC m=+497.780829674" watchObservedRunningTime="2026-03-20 13:30:51.492251557 +0000 UTC m=+497.781851500" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.497869 4690 generic.go:334] "Generic (PLEG): container finished" podID="62e83612-6289-48a8-a3bb-4488048279f7" containerID="d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040" exitCode=0 Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.497952 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2485h" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.497967 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2485h" event={"ID":"62e83612-6289-48a8-a3bb-4488048279f7","Type":"ContainerDied","Data":"d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.498985 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2485h" event={"ID":"62e83612-6289-48a8-a3bb-4488048279f7","Type":"ContainerDied","Data":"001bed812baae8e53f7fa68d4f8c93a78f36eef624dfb13c115d32bb0a62b0f4"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.501346 4690 generic.go:334] "Generic (PLEG): container finished" podID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerID="e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc" exitCode=0 Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.501407 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-snbxt" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.501428 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-snbxt" event={"ID":"c91a8b76-7263-4b29-ac22-b1459fe1f35b","Type":"ContainerDied","Data":"e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.501579 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-snbxt" event={"ID":"c91a8b76-7263-4b29-ac22-b1459fe1f35b","Type":"ContainerDied","Data":"b5eb9c24353c805594f74d0a945c5b452cbc25415ee55e3e6eb68d9709a52694"} Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.528630 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4rnwq"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.529791 4690 scope.go:117] "RemoveContainer" containerID="23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.535190 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4rnwq"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.544866 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zwzf4"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.551074 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zwzf4"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.556216 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5mwrz"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.561597 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-5mwrz"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.562321 4690 scope.go:117] "RemoveContainer" containerID="b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.563158 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6\": container with ID starting with b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6 not found: ID does not exist" containerID="b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.563197 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6"} err="failed to get container status \"b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6\": rpc error: code = NotFound desc = could not find container \"b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6\": container with ID starting with b24ebe374db89117cc14661b703e897e6f34b9d9cfa3ca013b565c08677c36a6 not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.563222 4690 scope.go:117] "RemoveContainer" containerID="fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.565058 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89\": container with ID starting with fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89 not found: ID does not exist" containerID="fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.565142 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89"} err="failed to get container status \"fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89\": rpc error: code = NotFound desc = could not find container \"fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89\": container with ID starting with fe9b4ad895ec41b6addeae164d2d05d3b551dd6cf038f21e5a3d3f4306c9cd89 not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.565171 4690 scope.go:117] "RemoveContainer" containerID="23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.565649 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76\": container with ID starting with 23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76 not found: ID does not exist" containerID="23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.565702 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76"} err="failed to get container status \"23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76\": rpc error: code = NotFound desc = could not find container \"23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76\": container with ID starting with 23025361afccc1295d4cb324473656850cc9bb312faf74b4c760b94bc83b9d76 not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.565736 4690 scope.go:117] "RemoveContainer" containerID="39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.569306 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-snbxt"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.587129 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-snbxt"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.589176 4690 scope.go:117] "RemoveContainer" containerID="4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.591124 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2485h"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.593962 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2485h"] Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.613677 4690 scope.go:117] "RemoveContainer" containerID="39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.614085 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9\": container with ID starting with 39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9 not found: ID does not exist" containerID="39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.614114 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9"} err="failed to get container status \"39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9\": rpc error: code = NotFound desc = could not find container \"39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9\": container with ID starting with 39dd5d168d75d2e41bacafe83a4f08f68677db9b0c28fb5a7be032af82cca8d9 not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.614139 4690 scope.go:117] "RemoveContainer" containerID="4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.614488 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d\": container with ID starting with 4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d not found: ID does not exist" containerID="4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.614512 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d"} err="failed to get container status \"4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d\": rpc error: code = NotFound desc = could not find container \"4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d\": container with ID starting with 4036c53af8cd8326d387abdcd6230d23d1a8a64f66969bc0ea065a9c01c4ce9d not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.614529 4690 scope.go:117] "RemoveContainer" containerID="82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.626820 4690 scope.go:117] "RemoveContainer" containerID="f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.642349 4690 scope.go:117] "RemoveContainer" containerID="437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.653868 4690 scope.go:117] "RemoveContainer" containerID="82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.654318 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a\": container with ID starting with 82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a not found: ID does not exist" containerID="82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.654368 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a"} err="failed to get container status 
\"82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a\": rpc error: code = NotFound desc = could not find container \"82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a\": container with ID starting with 82542d2c1633cd072ce37cb8d7348fef0c61180149e89f4eb2c91af45219052a not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.654399 4690 scope.go:117] "RemoveContainer" containerID="f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.654717 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115\": container with ID starting with f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115 not found: ID does not exist" containerID="f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.654750 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115"} err="failed to get container status \"f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115\": rpc error: code = NotFound desc = could not find container \"f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115\": container with ID starting with f5fa8f970679169253417a9a43489697d1ce3b5480a8dea551253f26dce4a115 not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.654779 4690 scope.go:117] "RemoveContainer" containerID="437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.655174 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d\": container with ID starting with 437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d not found: ID does not exist" containerID="437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.655198 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d"} err="failed to get container status \"437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d\": rpc error: code = NotFound desc = could not find container \"437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d\": container with ID starting with 437c9159e45de2a9bd8c70f1a7043ad736c03b9aa47d1ab7436bfe43bcbf732d not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.655212 4690 scope.go:117] "RemoveContainer" containerID="d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.667430 4690 scope.go:117] "RemoveContainer" containerID="085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.689967 4690 scope.go:117] "RemoveContainer" containerID="ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.706971 4690 scope.go:117] "RemoveContainer" containerID="d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040" Mar 20 13:30:51 crc 
kubenswrapper[4690]: E0320 13:30:51.707399 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040\": container with ID starting with d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040 not found: ID does not exist" containerID="d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.707439 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040"} err="failed to get container status \"d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040\": rpc error: code = NotFound desc = could not find container \"d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040\": container with ID starting with d60a98f47ea9b00ea3e0d89e32368ab540bf914d91abf659746cad5fb2a1a040 not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.707472 4690 scope.go:117] "RemoveContainer" containerID="085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.707895 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b\": container with ID starting with 085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b not found: ID does not exist" containerID="085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.707921 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b"} err="failed to get container status \"085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b\": rpc error: code = NotFound desc = could not find container \"085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b\": container with ID starting with 085cc9ec9821de5dbe7660aaa01b142d12f1602caaaffeab35e695225ffed92b not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.707936 4690 scope.go:117] "RemoveContainer" containerID="ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.708259 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b\": container with ID starting with ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b not found: ID does not exist" containerID="ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.708282 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b"} err="failed to get container status \"ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b\": rpc error: code = NotFound desc = could not find container \"ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b\": container with ID starting with ab73e25dd0dbab3d24d261ec375d3e3a4baab0e7728121941b91e2bfd720732b not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: 
I0320 13:30:51.708298 4690 scope.go:117] "RemoveContainer" containerID="e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.719788 4690 scope.go:117] "RemoveContainer" containerID="0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.730702 4690 scope.go:117] "RemoveContainer" containerID="cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.741690 4690 scope.go:117] "RemoveContainer" containerID="e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.742124 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc\": container with ID starting with e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc not found: ID does not exist" containerID="e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.742152 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc"} err="failed to get container status \"e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc\": rpc error: code = NotFound desc = could not find container \"e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc\": container with ID starting with e90204be00dccd6085f493f8889d547d206e12992f17791dd876499f837d9dfc not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.742173 4690 scope.go:117] "RemoveContainer" containerID="0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.742395 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548\": container with ID starting with 0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548 not found: ID does not exist" containerID="0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.742423 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548"} err="failed to get container status \"0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548\": rpc error: code = NotFound desc = could not find container \"0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548\": container with ID starting with 0e36b4cd8a6aac31530e58b59e0f333181efff621e1b67fab13cde4fafa61548 not found: ID does not exist" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.742441 4690 scope.go:117] "RemoveContainer" containerID="cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6" Mar 20 13:30:51 crc kubenswrapper[4690]: E0320 13:30:51.742688 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6\": container with ID starting with cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6 not found: ID does not exist" 
containerID="cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6" Mar 20 13:30:51 crc kubenswrapper[4690]: I0320 13:30:51.742714 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6"} err="failed to get container status \"cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6\": rpc error: code = NotFound desc = could not find container \"cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6\": container with ID starting with cc42c7f05cdfe95a8bac1ff774d3a808c00a4d1cf0ec3d3e0389e5d83baa9fe6 not found: ID does not exist" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250511 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-j6rqn"] Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250686 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="extract-content" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250697 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="extract-content" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250707 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e83612-6289-48a8-a3bb-4488048279f7" containerName="extract-content" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250713 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e83612-6289-48a8-a3bb-4488048279f7" containerName="extract-content" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250724 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250731 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250739 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250746 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250752 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e83612-6289-48a8-a3bb-4488048279f7" containerName="extract-utilities" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250757 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e83612-6289-48a8-a3bb-4488048279f7" containerName="extract-utilities" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250767 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerName="extract-utilities" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250773 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerName="extract-utilities" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250783 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerName="extract-content" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250789 4690 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerName="extract-content" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250796 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250802 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250810 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="extract-utilities" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250816 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="extract-utilities" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250822 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250827 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250835 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250853 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250863 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerName="extract-utilities" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250868 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerName="extract-utilities" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250877 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerName="extract-content" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250883 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerName="extract-content" Mar 20 13:30:52 crc kubenswrapper[4690]: E0320 13:30:52.250893 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e83612-6289-48a8-a3bb-4488048279f7" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250899 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e83612-6289-48a8-a3bb-4488048279f7" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250977 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.250989 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.251000 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="62e83612-6289-48a8-a3bb-4488048279f7" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 
13:30:52.251014 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.251024 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" containerName="registry-server" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.251174 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" containerName="marketplace-operator" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.251674 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.255473 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.263012 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j6rqn"] Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.403111 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6451112-5e01-4830-b37e-d898546035d6-catalog-content\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.403215 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kmcrp\" (UniqueName: \"kubernetes.io/projected/c6451112-5e01-4830-b37e-d898546035d6-kube-api-access-kmcrp\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.403243 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6451112-5e01-4830-b37e-d898546035d6-utilities\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.426126 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3978f4ce-bf05-41c1-b941-c5927fec1785" path="/var/lib/kubelet/pods/3978f4ce-bf05-41c1-b941-c5927fec1785/volumes" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.427466 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="617c74e7-0a16-4376-822f-390d3c44c7c5" path="/var/lib/kubelet/pods/617c74e7-0a16-4376-822f-390d3c44c7c5/volumes" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.428949 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62e83612-6289-48a8-a3bb-4488048279f7" path="/var/lib/kubelet/pods/62e83612-6289-48a8-a3bb-4488048279f7/volumes" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.431577 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6" path="/var/lib/kubelet/pods/bc8e24ad-a7e4-4f43-ab9c-c2e7365456e6/volumes" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.432990 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c91a8b76-7263-4b29-ac22-b1459fe1f35b" 
path="/var/lib/kubelet/pods/c91a8b76-7263-4b29-ac22-b1459fe1f35b/volumes" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.504364 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kmcrp\" (UniqueName: \"kubernetes.io/projected/c6451112-5e01-4830-b37e-d898546035d6-kube-api-access-kmcrp\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.504444 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6451112-5e01-4830-b37e-d898546035d6-utilities\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.504537 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6451112-5e01-4830-b37e-d898546035d6-catalog-content\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.506329 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6451112-5e01-4830-b37e-d898546035d6-utilities\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.506616 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6451112-5e01-4830-b37e-d898546035d6-catalog-content\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.530154 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kmcrp\" (UniqueName: \"kubernetes.io/projected/c6451112-5e01-4830-b37e-d898546035d6-kube-api-access-kmcrp\") pod \"certified-operators-j6rqn\" (UID: \"c6451112-5e01-4830-b37e-d898546035d6\") " pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.573246 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:30:52 crc kubenswrapper[4690]: I0320 13:30:52.788287 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-j6rqn"] Mar 20 13:30:52 crc kubenswrapper[4690]: W0320 13:30:52.796108 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc6451112_5e01_4830_b37e_d898546035d6.slice/crio-389f331f96597b2e21c0650b136ee3893421152338e05a576ad534f7410df8b3 WatchSource:0}: Error finding container 389f331f96597b2e21c0650b136ee3893421152338e05a576ad534f7410df8b3: Status 404 returned error can't find the container with id 389f331f96597b2e21c0650b136ee3893421152338e05a576ad534f7410df8b3 Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.246559 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jghhb"] Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.249010 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.251361 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.254772 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jghhb"] Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.418555 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74adca7d-849a-45af-9236-00ff6a15a294-utilities\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.418626 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74adca7d-849a-45af-9236-00ff6a15a294-catalog-content\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.418770 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4nlv\" (UniqueName: \"kubernetes.io/projected/74adca7d-849a-45af-9236-00ff6a15a294-kube-api-access-q4nlv\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.520546 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4nlv\" (UniqueName: \"kubernetes.io/projected/74adca7d-849a-45af-9236-00ff6a15a294-kube-api-access-q4nlv\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.520693 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74adca7d-849a-45af-9236-00ff6a15a294-utilities\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 
13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.520769 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74adca7d-849a-45af-9236-00ff6a15a294-catalog-content\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.521281 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/74adca7d-849a-45af-9236-00ff6a15a294-utilities\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.521477 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/74adca7d-849a-45af-9236-00ff6a15a294-catalog-content\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.534623 4690 generic.go:334] "Generic (PLEG): container finished" podID="c6451112-5e01-4830-b37e-d898546035d6" containerID="a16e730228a9289c0743919a47705ae0748db65b92234ecfd6d7f5b30a175792" exitCode=0 Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.534733 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j6rqn" event={"ID":"c6451112-5e01-4830-b37e-d898546035d6","Type":"ContainerDied","Data":"a16e730228a9289c0743919a47705ae0748db65b92234ecfd6d7f5b30a175792"} Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.534776 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j6rqn" event={"ID":"c6451112-5e01-4830-b37e-d898546035d6","Type":"ContainerStarted","Data":"389f331f96597b2e21c0650b136ee3893421152338e05a576ad534f7410df8b3"} Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.549328 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4nlv\" (UniqueName: \"kubernetes.io/projected/74adca7d-849a-45af-9236-00ff6a15a294-kube-api-access-q4nlv\") pod \"redhat-marketplace-jghhb\" (UID: \"74adca7d-849a-45af-9236-00ff6a15a294\") " pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.575364 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:30:53 crc kubenswrapper[4690]: I0320 13:30:53.757943 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jghhb"] Mar 20 13:30:53 crc kubenswrapper[4690]: W0320 13:30:53.764201 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74adca7d_849a_45af_9236_00ff6a15a294.slice/crio-bb7e8b2265f58847d78c52c1c04f03549ee68d5f61a5e9e35694c5e81180ce42 WatchSource:0}: Error finding container bb7e8b2265f58847d78c52c1c04f03549ee68d5f61a5e9e35694c5e81180ce42: Status 404 returned error can't find the container with id bb7e8b2265f58847d78c52c1c04f03549ee68d5f61a5e9e35694c5e81180ce42 Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.544871 4690 generic.go:334] "Generic (PLEG): container finished" podID="74adca7d-849a-45af-9236-00ff6a15a294" containerID="432c058fdf419c0e1e625ed68fd41f22a7ddc0a6299a39554dfe55d647eb0bf6" exitCode=0 Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.544977 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jghhb" event={"ID":"74adca7d-849a-45af-9236-00ff6a15a294","Type":"ContainerDied","Data":"432c058fdf419c0e1e625ed68fd41f22a7ddc0a6299a39554dfe55d647eb0bf6"} Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.545395 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jghhb" event={"ID":"74adca7d-849a-45af-9236-00ff6a15a294","Type":"ContainerStarted","Data":"bb7e8b2265f58847d78c52c1c04f03549ee68d5f61a5e9e35694c5e81180ce42"} Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.647540 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-dkm4r"] Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.648825 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.651346 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.663001 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dkm4r"] Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.839296 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-catalog-content\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.839540 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-utilities\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.839571 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndv8x\" (UniqueName: \"kubernetes.io/projected/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-kube-api-access-ndv8x\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.940930 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-utilities\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.940992 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndv8x\" (UniqueName: \"kubernetes.io/projected/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-kube-api-access-ndv8x\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.941017 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-catalog-content\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.941663 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-catalog-content\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.941761 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-utilities\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " 
pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.961205 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndv8x\" (UniqueName: \"kubernetes.io/projected/9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa-kube-api-access-ndv8x\") pod \"redhat-operators-dkm4r\" (UID: \"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa\") " pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:54 crc kubenswrapper[4690]: I0320 13:30:54.970313 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.165588 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-dkm4r"] Mar 20 13:30:55 crc kubenswrapper[4690]: W0320 13:30:55.176755 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f3b3f2f_c465_42b4_9e5e_eebdb270bcaa.slice/crio-e2749c2975c9a4ebcedf0eb0b6392f5d866296ef49696e2f9ded960ec2cd37d8 WatchSource:0}: Error finding container e2749c2975c9a4ebcedf0eb0b6392f5d866296ef49696e2f9ded960ec2cd37d8: Status 404 returned error can't find the container with id e2749c2975c9a4ebcedf0eb0b6392f5d866296ef49696e2f9ded960ec2cd37d8 Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.555106 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jghhb" event={"ID":"74adca7d-849a-45af-9236-00ff6a15a294","Type":"ContainerStarted","Data":"9e93c02a9c160f0ff4c32011a4ffc7606125ae3dfed24226dcadcdb258d84464"} Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.557347 4690 generic.go:334] "Generic (PLEG): container finished" podID="c6451112-5e01-4830-b37e-d898546035d6" containerID="1263015ef1bd303fcdb475c330890385bc379bfafef9a605e0e5d2866113a2dc" exitCode=0 Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.557414 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j6rqn" event={"ID":"c6451112-5e01-4830-b37e-d898546035d6","Type":"ContainerDied","Data":"1263015ef1bd303fcdb475c330890385bc379bfafef9a605e0e5d2866113a2dc"} Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.559326 4690 generic.go:334] "Generic (PLEG): container finished" podID="9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa" containerID="38b50152347a67d7740c2cda4c96a926f1a5b068f2dd83a5d2f68d6a79122072" exitCode=0 Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.559372 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkm4r" event={"ID":"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa","Type":"ContainerDied","Data":"38b50152347a67d7740c2cda4c96a926f1a5b068f2dd83a5d2f68d6a79122072"} Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.559396 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkm4r" event={"ID":"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa","Type":"ContainerStarted","Data":"e2749c2975c9a4ebcedf0eb0b6392f5d866296ef49696e2f9ded960ec2cd37d8"} Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.643628 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-fz7fb"] Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.644882 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.646828 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.652597 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fz7fb"] Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.749644 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afd7fa14-3170-4c3e-9370-80bcbe52e69f-catalog-content\") pod \"community-operators-fz7fb\" (UID: \"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.749708 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2f9v\" (UniqueName: \"kubernetes.io/projected/afd7fa14-3170-4c3e-9370-80bcbe52e69f-kube-api-access-q2f9v\") pod \"community-operators-fz7fb\" (UID: \"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.749746 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afd7fa14-3170-4c3e-9370-80bcbe52e69f-utilities\") pod \"community-operators-fz7fb\" (UID: \"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.851456 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afd7fa14-3170-4c3e-9370-80bcbe52e69f-catalog-content\") pod \"community-operators-fz7fb\" (UID: \"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.851745 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2f9v\" (UniqueName: \"kubernetes.io/projected/afd7fa14-3170-4c3e-9370-80bcbe52e69f-kube-api-access-q2f9v\") pod \"community-operators-fz7fb\" (UID: \"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.852101 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/afd7fa14-3170-4c3e-9370-80bcbe52e69f-catalog-content\") pod \"community-operators-fz7fb\" (UID: \"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.853695 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afd7fa14-3170-4c3e-9370-80bcbe52e69f-utilities\") pod \"community-operators-fz7fb\" (UID: \"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.853773 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/afd7fa14-3170-4c3e-9370-80bcbe52e69f-utilities\") pod \"community-operators-fz7fb\" (UID: 
\"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:55 crc kubenswrapper[4690]: I0320 13:30:55.878668 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2f9v\" (UniqueName: \"kubernetes.io/projected/afd7fa14-3170-4c3e-9370-80bcbe52e69f-kube-api-access-q2f9v\") pod \"community-operators-fz7fb\" (UID: \"afd7fa14-3170-4c3e-9370-80bcbe52e69f\") " pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.015350 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.219003 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-fz7fb"] Mar 20 13:30:56 crc kubenswrapper[4690]: W0320 13:30:56.231243 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafd7fa14_3170_4c3e_9370_80bcbe52e69f.slice/crio-cc89e02c138377a9afe7a89cc947eebbbb35f704fda7a585d5a0cdaa498f6468 WatchSource:0}: Error finding container cc89e02c138377a9afe7a89cc947eebbbb35f704fda7a585d5a0cdaa498f6468: Status 404 returned error can't find the container with id cc89e02c138377a9afe7a89cc947eebbbb35f704fda7a585d5a0cdaa498f6468 Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.567404 4690 generic.go:334] "Generic (PLEG): container finished" podID="74adca7d-849a-45af-9236-00ff6a15a294" containerID="9e93c02a9c160f0ff4c32011a4ffc7606125ae3dfed24226dcadcdb258d84464" exitCode=0 Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.567476 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jghhb" event={"ID":"74adca7d-849a-45af-9236-00ff6a15a294","Type":"ContainerDied","Data":"9e93c02a9c160f0ff4c32011a4ffc7606125ae3dfed24226dcadcdb258d84464"} Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.570584 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-j6rqn" event={"ID":"c6451112-5e01-4830-b37e-d898546035d6","Type":"ContainerStarted","Data":"e44161724b27da17e3962c16178355c9a3c100e562894e70324f16c6e559eff3"} Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.572975 4690 generic.go:334] "Generic (PLEG): container finished" podID="afd7fa14-3170-4c3e-9370-80bcbe52e69f" containerID="a164807b53812877d37d5e20ddd0046edec750b573a807d26a7b71f549dc83c9" exitCode=0 Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.573083 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fz7fb" event={"ID":"afd7fa14-3170-4c3e-9370-80bcbe52e69f","Type":"ContainerDied","Data":"a164807b53812877d37d5e20ddd0046edec750b573a807d26a7b71f549dc83c9"} Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.573153 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fz7fb" event={"ID":"afd7fa14-3170-4c3e-9370-80bcbe52e69f","Type":"ContainerStarted","Data":"cc89e02c138377a9afe7a89cc947eebbbb35f704fda7a585d5a0cdaa498f6468"} Mar 20 13:30:56 crc kubenswrapper[4690]: I0320 13:30:56.575693 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkm4r" event={"ID":"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa","Type":"ContainerStarted","Data":"c5949253565f451e90ce0cc1ac08062946f6ad7516bf40645f543401bb917f5f"} Mar 20 13:30:56 crc 
kubenswrapper[4690]: I0320 13:30:56.610911 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-j6rqn" podStartSLOduration=2.074870537 podStartE2EDuration="4.610889058s" podCreationTimestamp="2026-03-20 13:30:52 +0000 UTC" firstStartedPulling="2026-03-20 13:30:53.536414491 +0000 UTC m=+499.826014464" lastFinishedPulling="2026-03-20 13:30:56.072433042 +0000 UTC m=+502.362032985" observedRunningTime="2026-03-20 13:30:56.604583366 +0000 UTC m=+502.894183309" watchObservedRunningTime="2026-03-20 13:30:56.610889058 +0000 UTC m=+502.900488991" Mar 20 13:30:57 crc kubenswrapper[4690]: I0320 13:30:57.583264 4690 generic.go:334] "Generic (PLEG): container finished" podID="9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa" containerID="c5949253565f451e90ce0cc1ac08062946f6ad7516bf40645f543401bb917f5f" exitCode=0 Mar 20 13:30:57 crc kubenswrapper[4690]: I0320 13:30:57.583360 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkm4r" event={"ID":"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa","Type":"ContainerDied","Data":"c5949253565f451e90ce0cc1ac08062946f6ad7516bf40645f543401bb917f5f"} Mar 20 13:30:57 crc kubenswrapper[4690]: I0320 13:30:57.587445 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jghhb" event={"ID":"74adca7d-849a-45af-9236-00ff6a15a294","Type":"ContainerStarted","Data":"207c2e347c13e484fdfac75df413e40a0b48c5047853ad3f00fdc1374a5ab22b"} Mar 20 13:30:57 crc kubenswrapper[4690]: I0320 13:30:57.625805 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jghhb" podStartSLOduration=2.123882488 podStartE2EDuration="4.625781905s" podCreationTimestamp="2026-03-20 13:30:53 +0000 UTC" firstStartedPulling="2026-03-20 13:30:54.546207536 +0000 UTC m=+500.835807469" lastFinishedPulling="2026-03-20 13:30:57.048106943 +0000 UTC m=+503.337706886" observedRunningTime="2026-03-20 13:30:57.624361288 +0000 UTC m=+503.913961251" watchObservedRunningTime="2026-03-20 13:30:57.625781905 +0000 UTC m=+503.915381858" Mar 20 13:30:58 crc kubenswrapper[4690]: I0320 13:30:58.597311 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-dkm4r" event={"ID":"9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa","Type":"ContainerStarted","Data":"8b5e620957da2b1848d569052be923ea6bff53e112225b2d9d76b9a65277f6e8"} Mar 20 13:30:58 crc kubenswrapper[4690]: I0320 13:30:58.619586 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-dkm4r" podStartSLOduration=2.145433216 podStartE2EDuration="4.619563959s" podCreationTimestamp="2026-03-20 13:30:54 +0000 UTC" firstStartedPulling="2026-03-20 13:30:55.560985961 +0000 UTC m=+501.850585904" lastFinishedPulling="2026-03-20 13:30:58.035116704 +0000 UTC m=+504.324716647" observedRunningTime="2026-03-20 13:30:58.613302729 +0000 UTC m=+504.902902672" watchObservedRunningTime="2026-03-20 13:30:58.619563959 +0000 UTC m=+504.909163912" Mar 20 13:31:01 crc kubenswrapper[4690]: I0320 13:31:01.614245 4690 generic.go:334] "Generic (PLEG): container finished" podID="afd7fa14-3170-4c3e-9370-80bcbe52e69f" containerID="407a872d8b3d41fb6ea40f29e7cc5a39534ac17c01ff0dfd316b08e7c473f784" exitCode=0 Mar 20 13:31:01 crc kubenswrapper[4690]: I0320 13:31:01.614339 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fz7fb" 
event={"ID":"afd7fa14-3170-4c3e-9370-80bcbe52e69f","Type":"ContainerDied","Data":"407a872d8b3d41fb6ea40f29e7cc5a39534ac17c01ff0dfd316b08e7c473f784"} Mar 20 13:31:02 crc kubenswrapper[4690]: I0320 13:31:02.573726 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:31:02 crc kubenswrapper[4690]: I0320 13:31:02.573764 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:31:02 crc kubenswrapper[4690]: I0320 13:31:02.615786 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:31:02 crc kubenswrapper[4690]: I0320 13:31:02.622104 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-fz7fb" event={"ID":"afd7fa14-3170-4c3e-9370-80bcbe52e69f","Type":"ContainerStarted","Data":"f889411efc670b41068c857d20778b7971b1884a7c104d934fcff8a2c12b21a3"} Mar 20 13:31:02 crc kubenswrapper[4690]: I0320 13:31:02.662040 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-j6rqn" Mar 20 13:31:02 crc kubenswrapper[4690]: I0320 13:31:02.665176 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-fz7fb" podStartSLOduration=2.167052467 podStartE2EDuration="7.665152749s" podCreationTimestamp="2026-03-20 13:30:55 +0000 UTC" firstStartedPulling="2026-03-20 13:30:56.576632818 +0000 UTC m=+502.866232771" lastFinishedPulling="2026-03-20 13:31:02.0747331 +0000 UTC m=+508.364333053" observedRunningTime="2026-03-20 13:31:02.664165334 +0000 UTC m=+508.953765277" watchObservedRunningTime="2026-03-20 13:31:02.665152749 +0000 UTC m=+508.954752702" Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.576594 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.576820 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.640636 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.694308 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jghhb" Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.829698 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.829751 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.829786 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.830294 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"401f09af9f70f34d1e8ddfc73dfb7eb16961c2dcf3c7b9645411a300c57fc579"} pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 13:31:03 crc kubenswrapper[4690]: I0320 13:31:03.830341 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" containerID="cri-o://401f09af9f70f34d1e8ddfc73dfb7eb16961c2dcf3c7b9645411a300c57fc579" gracePeriod=600 Mar 20 13:31:04 crc kubenswrapper[4690]: I0320 13:31:04.635650 4690 generic.go:334] "Generic (PLEG): container finished" podID="60ded650-b298-4115-8286-8969b94d4062" containerID="401f09af9f70f34d1e8ddfc73dfb7eb16961c2dcf3c7b9645411a300c57fc579" exitCode=0 Mar 20 13:31:04 crc kubenswrapper[4690]: I0320 13:31:04.635722 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerDied","Data":"401f09af9f70f34d1e8ddfc73dfb7eb16961c2dcf3c7b9645411a300c57fc579"} Mar 20 13:31:04 crc kubenswrapper[4690]: I0320 13:31:04.636228 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"967fda2103017b22f1f6e626bb333638e17f0d3e154429ee6d859d0e073cb2cc"} Mar 20 13:31:04 crc kubenswrapper[4690]: I0320 13:31:04.636247 4690 scope.go:117] "RemoveContainer" containerID="f0f81414534d6305e46f2721f07056f80da6626c83b19e36747fca71d1b0961d" Mar 20 13:31:04 crc kubenswrapper[4690]: I0320 13:31:04.971288 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:31:04 crc kubenswrapper[4690]: I0320 13:31:04.971364 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:31:05 crc kubenswrapper[4690]: I0320 13:31:05.019053 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:31:05 crc kubenswrapper[4690]: I0320 13:31:05.705608 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-dkm4r" Mar 20 13:31:06 crc kubenswrapper[4690]: I0320 13:31:06.015811 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:31:06 crc kubenswrapper[4690]: I0320 13:31:06.015881 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:31:06 crc kubenswrapper[4690]: I0320 13:31:06.064429 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:31:16 crc kubenswrapper[4690]: I0320 13:31:16.062843 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-fz7fb" Mar 20 13:32:00 crc kubenswrapper[4690]: 
I0320 13:32:00.150944 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566892-mhg5j"] Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.155297 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.157757 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566892-mhg5j"] Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.158885 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.159777 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.160199 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.279886 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbzxk\" (UniqueName: \"kubernetes.io/projected/7806c869-764a-4b3e-b440-e7b7a5db5581-kube-api-access-qbzxk\") pod \"auto-csr-approver-29566892-mhg5j\" (UID: \"7806c869-764a-4b3e-b440-e7b7a5db5581\") " pod="openshift-infra/auto-csr-approver-29566892-mhg5j" Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.381236 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbzxk\" (UniqueName: \"kubernetes.io/projected/7806c869-764a-4b3e-b440-e7b7a5db5581-kube-api-access-qbzxk\") pod \"auto-csr-approver-29566892-mhg5j\" (UID: \"7806c869-764a-4b3e-b440-e7b7a5db5581\") " pod="openshift-infra/auto-csr-approver-29566892-mhg5j" Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.405531 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbzxk\" (UniqueName: \"kubernetes.io/projected/7806c869-764a-4b3e-b440-e7b7a5db5581-kube-api-access-qbzxk\") pod \"auto-csr-approver-29566892-mhg5j\" (UID: \"7806c869-764a-4b3e-b440-e7b7a5db5581\") " pod="openshift-infra/auto-csr-approver-29566892-mhg5j" Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.479126 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.863583 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566892-mhg5j"] Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.872448 4690 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 13:32:00 crc kubenswrapper[4690]: I0320 13:32:00.977911 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" event={"ID":"7806c869-764a-4b3e-b440-e7b7a5db5581","Type":"ContainerStarted","Data":"1a24bf67712d561ff060da23ce035f9dd6afeacaff4fa8b49b949d1da3571574"} Mar 20 13:32:02 crc kubenswrapper[4690]: I0320 13:32:02.989035 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" event={"ID":"7806c869-764a-4b3e-b440-e7b7a5db5581","Type":"ContainerStarted","Data":"5443a303b38eaf1240b93dae1a7ae7db9e3107f1eed33c36f6b4073cc2e8f5dd"} Mar 20 13:32:03 crc kubenswrapper[4690]: I0320 13:32:03.003399 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" podStartSLOduration=1.2521971459999999 podStartE2EDuration="3.003381905s" podCreationTimestamp="2026-03-20 13:32:00 +0000 UTC" firstStartedPulling="2026-03-20 13:32:00.872003951 +0000 UTC m=+567.161603934" lastFinishedPulling="2026-03-20 13:32:02.62318875 +0000 UTC m=+568.912788693" observedRunningTime="2026-03-20 13:32:03.003054376 +0000 UTC m=+569.292654319" watchObservedRunningTime="2026-03-20 13:32:03.003381905 +0000 UTC m=+569.292981848" Mar 20 13:32:03 crc kubenswrapper[4690]: I0320 13:32:03.994706 4690 generic.go:334] "Generic (PLEG): container finished" podID="7806c869-764a-4b3e-b440-e7b7a5db5581" containerID="5443a303b38eaf1240b93dae1a7ae7db9e3107f1eed33c36f6b4073cc2e8f5dd" exitCode=0 Mar 20 13:32:03 crc kubenswrapper[4690]: I0320 13:32:03.994761 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" event={"ID":"7806c869-764a-4b3e-b440-e7b7a5db5581","Type":"ContainerDied","Data":"5443a303b38eaf1240b93dae1a7ae7db9e3107f1eed33c36f6b4073cc2e8f5dd"} Mar 20 13:32:05 crc kubenswrapper[4690]: I0320 13:32:05.238115 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" Mar 20 13:32:05 crc kubenswrapper[4690]: I0320 13:32:05.246606 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbzxk\" (UniqueName: \"kubernetes.io/projected/7806c869-764a-4b3e-b440-e7b7a5db5581-kube-api-access-qbzxk\") pod \"7806c869-764a-4b3e-b440-e7b7a5db5581\" (UID: \"7806c869-764a-4b3e-b440-e7b7a5db5581\") " Mar 20 13:32:05 crc kubenswrapper[4690]: I0320 13:32:05.252995 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7806c869-764a-4b3e-b440-e7b7a5db5581-kube-api-access-qbzxk" (OuterVolumeSpecName: "kube-api-access-qbzxk") pod "7806c869-764a-4b3e-b440-e7b7a5db5581" (UID: "7806c869-764a-4b3e-b440-e7b7a5db5581"). InnerVolumeSpecName "kube-api-access-qbzxk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:32:05 crc kubenswrapper[4690]: I0320 13:32:05.348180 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbzxk\" (UniqueName: \"kubernetes.io/projected/7806c869-764a-4b3e-b440-e7b7a5db5581-kube-api-access-qbzxk\") on node \"crc\" DevicePath \"\"" Mar 20 13:32:06 crc kubenswrapper[4690]: I0320 13:32:06.011034 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" event={"ID":"7806c869-764a-4b3e-b440-e7b7a5db5581","Type":"ContainerDied","Data":"1a24bf67712d561ff060da23ce035f9dd6afeacaff4fa8b49b949d1da3571574"} Mar 20 13:32:06 crc kubenswrapper[4690]: I0320 13:32:06.011087 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a24bf67712d561ff060da23ce035f9dd6afeacaff4fa8b49b949d1da3571574" Mar 20 13:32:06 crc kubenswrapper[4690]: I0320 13:32:06.011151 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566892-mhg5j" Mar 20 13:32:06 crc kubenswrapper[4690]: I0320 13:32:06.062077 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566886-cp8l8"] Mar 20 13:32:06 crc kubenswrapper[4690]: I0320 13:32:06.065341 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566886-cp8l8"] Mar 20 13:32:06 crc kubenswrapper[4690]: I0320 13:32:06.425186 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6db1d803-f871-41d2-b6a7-0b3456af1ddf" path="/var/lib/kubelet/pods/6db1d803-f871-41d2-b6a7-0b3456af1ddf/volumes" Mar 20 13:33:33 crc kubenswrapper[4690]: I0320 13:33:33.830361 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:33:33 crc kubenswrapper[4690]: I0320 13:33:33.831150 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:33:42 crc kubenswrapper[4690]: I0320 13:33:42.683367 4690 scope.go:117] "RemoveContainer" containerID="7eec703138e85bdbc467294840668e4439643069915bcf24fee54923fde3f973" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.155659 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566894-zcjr2"] Mar 20 13:34:00 crc kubenswrapper[4690]: E0320 13:34:00.156657 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7806c869-764a-4b3e-b440-e7b7a5db5581" containerName="oc" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.156678 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7806c869-764a-4b3e-b440-e7b7a5db5581" containerName="oc" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.156915 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="7806c869-764a-4b3e-b440-e7b7a5db5581" containerName="oc" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.157659 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566894-zcjr2" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.161547 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.161594 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.162062 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.180299 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566894-zcjr2"] Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.259733 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhm7m\" (UniqueName: \"kubernetes.io/projected/ed808ce2-4c53-47b1-83c0-fb041145f034-kube-api-access-vhm7m\") pod \"auto-csr-approver-29566894-zcjr2\" (UID: \"ed808ce2-4c53-47b1-83c0-fb041145f034\") " pod="openshift-infra/auto-csr-approver-29566894-zcjr2" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.361640 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhm7m\" (UniqueName: \"kubernetes.io/projected/ed808ce2-4c53-47b1-83c0-fb041145f034-kube-api-access-vhm7m\") pod \"auto-csr-approver-29566894-zcjr2\" (UID: \"ed808ce2-4c53-47b1-83c0-fb041145f034\") " pod="openshift-infra/auto-csr-approver-29566894-zcjr2" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.390429 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhm7m\" (UniqueName: \"kubernetes.io/projected/ed808ce2-4c53-47b1-83c0-fb041145f034-kube-api-access-vhm7m\") pod \"auto-csr-approver-29566894-zcjr2\" (UID: \"ed808ce2-4c53-47b1-83c0-fb041145f034\") " pod="openshift-infra/auto-csr-approver-29566894-zcjr2" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.484020 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566894-zcjr2" Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.750140 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566894-zcjr2"] Mar 20 13:34:00 crc kubenswrapper[4690]: I0320 13:34:00.778730 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566894-zcjr2" event={"ID":"ed808ce2-4c53-47b1-83c0-fb041145f034","Type":"ContainerStarted","Data":"e82216ee0678f44bd549cafd82b0a536aa03747ef34d03c6fa6eafd9ae0c7fa5"} Mar 20 13:34:03 crc kubenswrapper[4690]: I0320 13:34:03.798584 4690 generic.go:334] "Generic (PLEG): container finished" podID="ed808ce2-4c53-47b1-83c0-fb041145f034" containerID="4dece95a6e6f34d35d1da45af685042a97fc9d0db7c61e7bb42cd283eb45edee" exitCode=0 Mar 20 13:34:03 crc kubenswrapper[4690]: I0320 13:34:03.798809 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566894-zcjr2" event={"ID":"ed808ce2-4c53-47b1-83c0-fb041145f034","Type":"ContainerDied","Data":"4dece95a6e6f34d35d1da45af685042a97fc9d0db7c61e7bb42cd283eb45edee"} Mar 20 13:34:03 crc kubenswrapper[4690]: I0320 13:34:03.830519 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:34:03 crc kubenswrapper[4690]: I0320 13:34:03.830608 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:34:05 crc kubenswrapper[4690]: I0320 13:34:05.089150 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566894-zcjr2" Mar 20 13:34:05 crc kubenswrapper[4690]: I0320 13:34:05.226596 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhm7m\" (UniqueName: \"kubernetes.io/projected/ed808ce2-4c53-47b1-83c0-fb041145f034-kube-api-access-vhm7m\") pod \"ed808ce2-4c53-47b1-83c0-fb041145f034\" (UID: \"ed808ce2-4c53-47b1-83c0-fb041145f034\") " Mar 20 13:34:05 crc kubenswrapper[4690]: I0320 13:34:05.236104 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed808ce2-4c53-47b1-83c0-fb041145f034-kube-api-access-vhm7m" (OuterVolumeSpecName: "kube-api-access-vhm7m") pod "ed808ce2-4c53-47b1-83c0-fb041145f034" (UID: "ed808ce2-4c53-47b1-83c0-fb041145f034"). InnerVolumeSpecName "kube-api-access-vhm7m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:34:05 crc kubenswrapper[4690]: I0320 13:34:05.328073 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhm7m\" (UniqueName: \"kubernetes.io/projected/ed808ce2-4c53-47b1-83c0-fb041145f034-kube-api-access-vhm7m\") on node \"crc\" DevicePath \"\"" Mar 20 13:34:05 crc kubenswrapper[4690]: I0320 13:34:05.816178 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566894-zcjr2" event={"ID":"ed808ce2-4c53-47b1-83c0-fb041145f034","Type":"ContainerDied","Data":"e82216ee0678f44bd549cafd82b0a536aa03747ef34d03c6fa6eafd9ae0c7fa5"} Mar 20 13:34:05 crc kubenswrapper[4690]: I0320 13:34:05.816234 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e82216ee0678f44bd549cafd82b0a536aa03747ef34d03c6fa6eafd9ae0c7fa5" Mar 20 13:34:05 crc kubenswrapper[4690]: I0320 13:34:05.816246 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566894-zcjr2" Mar 20 13:34:06 crc kubenswrapper[4690]: I0320 13:34:06.156792 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566888-ddt9g"] Mar 20 13:34:06 crc kubenswrapper[4690]: I0320 13:34:06.160277 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566888-ddt9g"] Mar 20 13:34:06 crc kubenswrapper[4690]: I0320 13:34:06.424897 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d326236-875c-464f-b9e7-97f6bc7a7863" path="/var/lib/kubelet/pods/2d326236-875c-464f-b9e7-97f6bc7a7863/volumes" Mar 20 13:34:33 crc kubenswrapper[4690]: I0320 13:34:33.830008 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:34:33 crc kubenswrapper[4690]: I0320 13:34:33.830552 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:34:33 crc kubenswrapper[4690]: I0320 13:34:33.830612 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:34:33 crc kubenswrapper[4690]: I0320 13:34:33.831344 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"967fda2103017b22f1f6e626bb333638e17f0d3e154429ee6d859d0e073cb2cc"} pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 13:34:33 crc kubenswrapper[4690]: I0320 13:34:33.831439 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" containerID="cri-o://967fda2103017b22f1f6e626bb333638e17f0d3e154429ee6d859d0e073cb2cc" gracePeriod=600 Mar 20 13:34:34 crc kubenswrapper[4690]: I0320 13:34:34.094564 4690 generic.go:334] "Generic 
(PLEG): container finished" podID="60ded650-b298-4115-8286-8969b94d4062" containerID="967fda2103017b22f1f6e626bb333638e17f0d3e154429ee6d859d0e073cb2cc" exitCode=0 Mar 20 13:34:34 crc kubenswrapper[4690]: I0320 13:34:34.094770 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerDied","Data":"967fda2103017b22f1f6e626bb333638e17f0d3e154429ee6d859d0e073cb2cc"} Mar 20 13:34:34 crc kubenswrapper[4690]: I0320 13:34:34.095025 4690 scope.go:117] "RemoveContainer" containerID="401f09af9f70f34d1e8ddfc73dfb7eb16961c2dcf3c7b9645411a300c57fc579" Mar 20 13:34:35 crc kubenswrapper[4690]: I0320 13:34:35.104282 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"a92e622601d5d0cbc8d5ec8266b1e5ffd0ed3023dc04d14e7b0e5bdc6a68783b"} Mar 20 13:35:42 crc kubenswrapper[4690]: I0320 13:35:42.767112 4690 scope.go:117] "RemoveContainer" containerID="e60ccaf4d35642c92ba8799788c4c0aaf62721a93dcba7b05cbef5ab4339ebfe" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.150703 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566896-kj56m"] Mar 20 13:36:00 crc kubenswrapper[4690]: E0320 13:36:00.152957 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed808ce2-4c53-47b1-83c0-fb041145f034" containerName="oc" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.153107 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed808ce2-4c53-47b1-83c0-fb041145f034" containerName="oc" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.153401 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed808ce2-4c53-47b1-83c0-fb041145f034" containerName="oc" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.154322 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566896-kj56m" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.161439 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566896-kj56m"] Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.177379 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.177652 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.177672 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.246368 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bls7w\" (UniqueName: \"kubernetes.io/projected/19b3591a-cb0d-4249-968a-06e6c9891eb1-kube-api-access-bls7w\") pod \"auto-csr-approver-29566896-kj56m\" (UID: \"19b3591a-cb0d-4249-968a-06e6c9891eb1\") " pod="openshift-infra/auto-csr-approver-29566896-kj56m" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.347766 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bls7w\" (UniqueName: \"kubernetes.io/projected/19b3591a-cb0d-4249-968a-06e6c9891eb1-kube-api-access-bls7w\") pod \"auto-csr-approver-29566896-kj56m\" (UID: \"19b3591a-cb0d-4249-968a-06e6c9891eb1\") " pod="openshift-infra/auto-csr-approver-29566896-kj56m" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.374553 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bls7w\" (UniqueName: \"kubernetes.io/projected/19b3591a-cb0d-4249-968a-06e6c9891eb1-kube-api-access-bls7w\") pod \"auto-csr-approver-29566896-kj56m\" (UID: \"19b3591a-cb0d-4249-968a-06e6c9891eb1\") " pod="openshift-infra/auto-csr-approver-29566896-kj56m" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.499990 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566896-kj56m" Mar 20 13:36:00 crc kubenswrapper[4690]: I0320 13:36:00.765586 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566896-kj56m"] Mar 20 13:36:01 crc kubenswrapper[4690]: I0320 13:36:01.688394 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566896-kj56m" event={"ID":"19b3591a-cb0d-4249-968a-06e6c9891eb1","Type":"ContainerStarted","Data":"b510583de368d814c665c0fe71132ba107ad5dd57d9f98bde48c3d8a439c17de"} Mar 20 13:36:02 crc kubenswrapper[4690]: I0320 13:36:02.697195 4690 generic.go:334] "Generic (PLEG): container finished" podID="19b3591a-cb0d-4249-968a-06e6c9891eb1" containerID="6186b1025faebc9e90fb5ca0f0e37acaca84549a637abaa45ddd86af1f503d2e" exitCode=0 Mar 20 13:36:02 crc kubenswrapper[4690]: I0320 13:36:02.697290 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566896-kj56m" event={"ID":"19b3591a-cb0d-4249-968a-06e6c9891eb1","Type":"ContainerDied","Data":"6186b1025faebc9e90fb5ca0f0e37acaca84549a637abaa45ddd86af1f503d2e"} Mar 20 13:36:03 crc kubenswrapper[4690]: I0320 13:36:03.988222 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566896-kj56m" Mar 20 13:36:04 crc kubenswrapper[4690]: I0320 13:36:04.105505 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bls7w\" (UniqueName: \"kubernetes.io/projected/19b3591a-cb0d-4249-968a-06e6c9891eb1-kube-api-access-bls7w\") pod \"19b3591a-cb0d-4249-968a-06e6c9891eb1\" (UID: \"19b3591a-cb0d-4249-968a-06e6c9891eb1\") " Mar 20 13:36:04 crc kubenswrapper[4690]: I0320 13:36:04.119257 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19b3591a-cb0d-4249-968a-06e6c9891eb1-kube-api-access-bls7w" (OuterVolumeSpecName: "kube-api-access-bls7w") pod "19b3591a-cb0d-4249-968a-06e6c9891eb1" (UID: "19b3591a-cb0d-4249-968a-06e6c9891eb1"). InnerVolumeSpecName "kube-api-access-bls7w". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:36:04 crc kubenswrapper[4690]: I0320 13:36:04.210356 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bls7w\" (UniqueName: \"kubernetes.io/projected/19b3591a-cb0d-4249-968a-06e6c9891eb1-kube-api-access-bls7w\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:04 crc kubenswrapper[4690]: I0320 13:36:04.720895 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566896-kj56m" event={"ID":"19b3591a-cb0d-4249-968a-06e6c9891eb1","Type":"ContainerDied","Data":"b510583de368d814c665c0fe71132ba107ad5dd57d9f98bde48c3d8a439c17de"} Mar 20 13:36:04 crc kubenswrapper[4690]: I0320 13:36:04.720954 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b510583de368d814c665c0fe71132ba107ad5dd57d9f98bde48c3d8a439c17de" Mar 20 13:36:04 crc kubenswrapper[4690]: I0320 13:36:04.721031 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566896-kj56m" Mar 20 13:36:05 crc kubenswrapper[4690]: I0320 13:36:05.059060 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566890-24sp6"] Mar 20 13:36:05 crc kubenswrapper[4690]: I0320 13:36:05.064172 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566890-24sp6"] Mar 20 13:36:06 crc kubenswrapper[4690]: I0320 13:36:06.420091 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="21a4a656-82a9-4524-9b96-59374e4523e8" path="/var/lib/kubelet/pods/21a4a656-82a9-4524-9b96-59374e4523e8/volumes" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.000655 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq"] Mar 20 13:36:08 crc kubenswrapper[4690]: E0320 13:36:08.000946 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19b3591a-cb0d-4249-968a-06e6c9891eb1" containerName="oc" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.000961 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="19b3591a-cb0d-4249-968a-06e6c9891eb1" containerName="oc" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.001087 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="19b3591a-cb0d-4249-968a-06e6c9891eb1" containerName="oc" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.001519 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.005457 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.009088 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.021056 4690 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-nh68h" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.021662 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq"] Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.026476 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-858654f9db-snqb2"] Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.027384 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-snqb2" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.029665 4690 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-jw2xz" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.044966 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-snqb2"] Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.049727 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-gs5zl"] Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.050436 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.054698 4690 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-z4vdk" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.067377 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v98cx\" (UniqueName: \"kubernetes.io/projected/5e031456-428c-4966-8bca-2002a392ffb2-kube-api-access-v98cx\") pod \"cert-manager-858654f9db-snqb2\" (UID: \"5e031456-428c-4966-8bca-2002a392ffb2\") " pod="cert-manager/cert-manager-858654f9db-snqb2" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.067432 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6gg5\" (UniqueName: \"kubernetes.io/projected/1ecae164-1e40-4b85-b047-9e1af1192ef6-kube-api-access-g6gg5\") pod \"cert-manager-cainjector-cf98fcc89-gwtjq\" (UID: \"1ecae164-1e40-4b85-b047-9e1af1192ef6\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.067465 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xfnjz\" (UniqueName: \"kubernetes.io/projected/4b4f7f31-5780-4001-b9c7-7dade6cfea4d-kube-api-access-xfnjz\") pod \"cert-manager-webhook-687f57d79b-gs5zl\" (UID: \"4b4f7f31-5780-4001-b9c7-7dade6cfea4d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.067734 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-gs5zl"] Mar 20 13:36:08 crc 
kubenswrapper[4690]: I0320 13:36:08.169060 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6gg5\" (UniqueName: \"kubernetes.io/projected/1ecae164-1e40-4b85-b047-9e1af1192ef6-kube-api-access-g6gg5\") pod \"cert-manager-cainjector-cf98fcc89-gwtjq\" (UID: \"1ecae164-1e40-4b85-b047-9e1af1192ef6\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.169129 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xfnjz\" (UniqueName: \"kubernetes.io/projected/4b4f7f31-5780-4001-b9c7-7dade6cfea4d-kube-api-access-xfnjz\") pod \"cert-manager-webhook-687f57d79b-gs5zl\" (UID: \"4b4f7f31-5780-4001-b9c7-7dade6cfea4d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.169212 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v98cx\" (UniqueName: \"kubernetes.io/projected/5e031456-428c-4966-8bca-2002a392ffb2-kube-api-access-v98cx\") pod \"cert-manager-858654f9db-snqb2\" (UID: \"5e031456-428c-4966-8bca-2002a392ffb2\") " pod="cert-manager/cert-manager-858654f9db-snqb2" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.186382 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v98cx\" (UniqueName: \"kubernetes.io/projected/5e031456-428c-4966-8bca-2002a392ffb2-kube-api-access-v98cx\") pod \"cert-manager-858654f9db-snqb2\" (UID: \"5e031456-428c-4966-8bca-2002a392ffb2\") " pod="cert-manager/cert-manager-858654f9db-snqb2" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.187196 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6gg5\" (UniqueName: \"kubernetes.io/projected/1ecae164-1e40-4b85-b047-9e1af1192ef6-kube-api-access-g6gg5\") pod \"cert-manager-cainjector-cf98fcc89-gwtjq\" (UID: \"1ecae164-1e40-4b85-b047-9e1af1192ef6\") " pod="cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.187308 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xfnjz\" (UniqueName: \"kubernetes.io/projected/4b4f7f31-5780-4001-b9c7-7dade6cfea4d-kube-api-access-xfnjz\") pod \"cert-manager-webhook-687f57d79b-gs5zl\" (UID: \"4b4f7f31-5780-4001-b9c7-7dade6cfea4d\") " pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.324749 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.342732 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-858654f9db-snqb2" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.366384 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.778651 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq"] Mar 20 13:36:08 crc kubenswrapper[4690]: W0320 13:36:08.786436 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ecae164_1e40_4b85_b047_9e1af1192ef6.slice/crio-ea3bd8c27b6a82735f944d76d0bc4822b1b171a38bb09ac81393c52715f6218c WatchSource:0}: Error finding container ea3bd8c27b6a82735f944d76d0bc4822b1b171a38bb09ac81393c52715f6218c: Status 404 returned error can't find the container with id ea3bd8c27b6a82735f944d76d0bc4822b1b171a38bb09ac81393c52715f6218c Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.853876 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-687f57d79b-gs5zl"] Mar 20 13:36:08 crc kubenswrapper[4690]: I0320 13:36:08.861665 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-858654f9db-snqb2"] Mar 20 13:36:08 crc kubenswrapper[4690]: W0320 13:36:08.865465 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e031456_428c_4966_8bca_2002a392ffb2.slice/crio-8ceb92dc7addcad2090ce25d31105333b70d99c045edec78b4f5474e25c045c9 WatchSource:0}: Error finding container 8ceb92dc7addcad2090ce25d31105333b70d99c045edec78b4f5474e25c045c9: Status 404 returned error can't find the container with id 8ceb92dc7addcad2090ce25d31105333b70d99c045edec78b4f5474e25c045c9 Mar 20 13:36:09 crc kubenswrapper[4690]: I0320 13:36:09.779256 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq" event={"ID":"1ecae164-1e40-4b85-b047-9e1af1192ef6","Type":"ContainerStarted","Data":"ea3bd8c27b6a82735f944d76d0bc4822b1b171a38bb09ac81393c52715f6218c"} Mar 20 13:36:09 crc kubenswrapper[4690]: I0320 13:36:09.780385 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" event={"ID":"4b4f7f31-5780-4001-b9c7-7dade6cfea4d","Type":"ContainerStarted","Data":"9b968cd8d2d6a3dd4cb28ec3311d9ef452bbef65fb6910a88d84020cc4a493b4"} Mar 20 13:36:09 crc kubenswrapper[4690]: I0320 13:36:09.781786 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-snqb2" event={"ID":"5e031456-428c-4966-8bca-2002a392ffb2","Type":"ContainerStarted","Data":"8ceb92dc7addcad2090ce25d31105333b70d99c045edec78b4f5474e25c045c9"} Mar 20 13:36:12 crc kubenswrapper[4690]: I0320 13:36:12.835568 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-858654f9db-snqb2" event={"ID":"5e031456-428c-4966-8bca-2002a392ffb2","Type":"ContainerStarted","Data":"d48b1b9446644095cf2b8a86192ed3d97e0ee422dd767bfa19c98b38ce90042c"} Mar 20 13:36:12 crc kubenswrapper[4690]: I0320 13:36:12.838833 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq" event={"ID":"1ecae164-1e40-4b85-b047-9e1af1192ef6","Type":"ContainerStarted","Data":"af87f5899c56a1d587ffa36713e48118d9499ca08a6b0c805500289b57822eab"} Mar 20 13:36:12 crc kubenswrapper[4690]: I0320 13:36:12.841146 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" 
event={"ID":"4b4f7f31-5780-4001-b9c7-7dade6cfea4d","Type":"ContainerStarted","Data":"e81a825f3a9c5601824e5372cb9eb561052a9c266c1fe5a9e9770dd328dfa53d"} Mar 20 13:36:12 crc kubenswrapper[4690]: I0320 13:36:12.841308 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" Mar 20 13:36:12 crc kubenswrapper[4690]: I0320 13:36:12.856434 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-858654f9db-snqb2" podStartSLOduration=3.199737176 podStartE2EDuration="5.856410256s" podCreationTimestamp="2026-03-20 13:36:07 +0000 UTC" firstStartedPulling="2026-03-20 13:36:08.86874361 +0000 UTC m=+815.158343573" lastFinishedPulling="2026-03-20 13:36:11.52541671 +0000 UTC m=+817.815016653" observedRunningTime="2026-03-20 13:36:12.849961622 +0000 UTC m=+819.139561575" watchObservedRunningTime="2026-03-20 13:36:12.856410256 +0000 UTC m=+819.146010239" Mar 20 13:36:12 crc kubenswrapper[4690]: I0320 13:36:12.900059 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" podStartSLOduration=1.2025249869999999 podStartE2EDuration="4.900029523s" podCreationTimestamp="2026-03-20 13:36:08 +0000 UTC" firstStartedPulling="2026-03-20 13:36:08.867459393 +0000 UTC m=+815.157059356" lastFinishedPulling="2026-03-20 13:36:12.564963939 +0000 UTC m=+818.854563892" observedRunningTime="2026-03-20 13:36:12.894270468 +0000 UTC m=+819.183870481" watchObservedRunningTime="2026-03-20 13:36:12.900029523 +0000 UTC m=+819.189629476" Mar 20 13:36:12 crc kubenswrapper[4690]: I0320 13:36:12.927874 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-cf98fcc89-gwtjq" podStartSLOduration=2.199855831 podStartE2EDuration="5.927832488s" podCreationTimestamp="2026-03-20 13:36:07 +0000 UTC" firstStartedPulling="2026-03-20 13:36:08.788768372 +0000 UTC m=+815.078368325" lastFinishedPulling="2026-03-20 13:36:12.516745039 +0000 UTC m=+818.806344982" observedRunningTime="2026-03-20 13:36:12.922972679 +0000 UTC m=+819.212572652" watchObservedRunningTime="2026-03-20 13:36:12.927832488 +0000 UTC m=+819.217432431" Mar 20 13:36:15 crc kubenswrapper[4690]: I0320 13:36:15.467839 4690 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.049963 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-x2b7f"] Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.051395 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovn-controller" containerID="cri-o://705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7" gracePeriod=30 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.051460 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="nbdb" containerID="cri-o://e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab" gracePeriod=30 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.051548 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" 
podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="northd" containerID="cri-o://a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b" gracePeriod=30 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.051595 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kube-rbac-proxy-node" containerID="cri-o://4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a" gracePeriod=30 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.051473 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a" gracePeriod=30 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.051701 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="sbdb" containerID="cri-o://e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb" gracePeriod=30 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.051715 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovn-acl-logging" containerID="cri-o://7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11" gracePeriod=30 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.112046 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" containerID="cri-o://c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" gracePeriod=30 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.368672 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-687f57d79b-gs5zl" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.398777 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/3.log" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.403533 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovn-acl-logging/0.log" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.404410 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovn-controller/0.log" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.405275 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.464294 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-vgb95"] Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465084 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465179 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465233 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kube-rbac-proxy-node" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465289 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kube-rbac-proxy-node" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465304 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465313 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465352 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kubecfg-setup" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465364 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kubecfg-setup" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465374 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="sbdb" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465449 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="sbdb" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465498 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovn-acl-logging" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465510 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovn-acl-logging" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465523 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovn-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465532 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovn-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465545 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kube-rbac-proxy-ovn-metrics" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465554 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kube-rbac-proxy-ovn-metrics" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465565 4690 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="nbdb" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465573 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="nbdb" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465581 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465589 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465601 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465610 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465620 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="northd" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465628 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="northd" Mar 20 13:36:18 crc kubenswrapper[4690]: E0320 13:36:18.465639 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465647 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465770 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovn-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465789 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465799 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465809 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465820 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kube-rbac-proxy-ovn-metrics" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465831 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465862 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="nbdb" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465873 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="kube-rbac-proxy-node" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465883 
4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovn-acl-logging" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465896 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="northd" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.465904 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="sbdb" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.466121 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerName="ovnkube-controller" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.468126 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.500827 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-config\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.500885 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovn-node-metrics-cert\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.500911 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-netns\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.500929 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-systemd\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.500965 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-ovn\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.500988 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-systemd-units\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501008 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7x88j\" (UniqueName: \"kubernetes.io/projected/874238ac-6c4c-40c9-ad22-1bec31020fb6-kube-api-access-7x88j\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501026 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"env-overrides\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-env-overrides\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501042 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-netd\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501062 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-log-socket\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501106 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-etc-openvswitch\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501126 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-script-lib\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501143 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-var-lib-openvswitch\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501166 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-slash\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501186 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-bin\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501215 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-openvswitch\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501238 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-kubelet\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501265 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" 
(UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-node-log\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501283 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501306 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-ovn-kubernetes\") pod \"874238ac-6c4c-40c9-ad22-1bec31020fb6\" (UID: \"874238ac-6c4c-40c9-ad22-1bec31020fb6\") " Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501363 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501416 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501453 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501685 4690 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501700 4690 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501709 4690 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501776 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501812 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501831 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-slash" (OuterVolumeSpecName: "host-slash") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501870 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501890 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501905 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501921 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-node-log" (OuterVolumeSpecName: "node-log") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501938 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501957 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). 
InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.501972 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.502226 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.502428 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.502457 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-log-socket" (OuterVolumeSpecName: "log-socket") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.502616 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.506601 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/874238ac-6c4c-40c9-ad22-1bec31020fb6-kube-api-access-7x88j" (OuterVolumeSpecName: "kube-api-access-7x88j") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "kube-api-access-7x88j". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.507118 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.514965 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "874238ac-6c4c-40c9-ad22-1bec31020fb6" (UID: "874238ac-6c4c-40c9-ad22-1bec31020fb6"). 
InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602596 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602644 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-ovn\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602688 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-slash\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602711 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-systemd-units\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602747 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-cni-bin\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602786 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-ovnkube-config\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602806 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-env-overrides\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602828 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-log-socket\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.602896 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-ovnkube-script-lib\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603009 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/028e029f-b883-434c-98e8-ba7e54a437e0-ovn-node-metrics-cert\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603034 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-var-lib-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603090 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-run-ovn-kubernetes\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603113 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mc88f\" (UniqueName: \"kubernetes.io/projected/028e029f-b883-434c-98e8-ba7e54a437e0-kube-api-access-mc88f\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603169 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-cni-netd\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603199 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-run-netns\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603255 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-systemd\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603315 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-etc-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603377 4690 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-kubelet\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603401 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603456 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-node-log\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603534 4690 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-kubelet\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603551 4690 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603584 4690 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-node-log\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603597 4690 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603611 4690 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-run-netns\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603622 4690 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-systemd\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603635 4690 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-ovn\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603647 4690 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-systemd-units\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603658 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7x88j\" (UniqueName: \"kubernetes.io/projected/874238ac-6c4c-40c9-ad22-1bec31020fb6-kube-api-access-7x88j\") on node 
\"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603669 4690 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-env-overrides\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603681 4690 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-netd\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603692 4690 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-log-socket\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603704 4690 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/874238ac-6c4c-40c9-ad22-1bec31020fb6-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603715 4690 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603726 4690 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-slash\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603737 4690 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-host-cni-bin\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.603748 4690 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/874238ac-6c4c-40c9-ad22-1bec31020fb6-run-openvswitch\") on node \"crc\" DevicePath \"\"" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705115 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-cni-bin\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705168 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-ovnkube-config\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705224 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-env-overrides\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705231 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-cni-bin\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705249 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-log-socket\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705463 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-ovnkube-script-lib\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705508 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-log-socket\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705830 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-env-overrides\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705933 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/028e029f-b883-434c-98e8-ba7e54a437e0-ovn-node-metrics-cert\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705983 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-var-lib-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.705994 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-ovnkube-script-lib\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706062 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-var-lib-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706349 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-run-ovn-kubernetes\") pod 
\"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706377 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-cni-netd\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706394 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mc88f\" (UniqueName: \"kubernetes.io/projected/028e029f-b883-434c-98e8-ba7e54a437e0-kube-api-access-mc88f\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706408 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-run-netns\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706422 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-run-ovn-kubernetes\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706423 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-systemd\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706506 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-etc-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706525 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-kubelet\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706561 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-etc-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706469 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-cni-netd\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706441 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-systemd\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706312 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/028e029f-b883-434c-98e8-ba7e54a437e0-ovnkube-config\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706455 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-run-netns\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706616 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-kubelet\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706668 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706683 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-node-log\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706698 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706715 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-ovn\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706768 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 
crc kubenswrapper[4690]: I0320 13:36:18.706780 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-openvswitch\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706786 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-node-log\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706830 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-run-ovn\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706895 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-slash\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706915 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-host-slash\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706959 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-systemd-units\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.706992 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/028e029f-b883-434c-98e8-ba7e54a437e0-systemd-units\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.710439 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/028e029f-b883-434c-98e8-ba7e54a437e0-ovn-node-metrics-cert\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.731369 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mc88f\" (UniqueName: \"kubernetes.io/projected/028e029f-b883-434c-98e8-ba7e54a437e0-kube-api-access-mc88f\") pod \"ovnkube-node-vgb95\" (UID: \"028e029f-b883-434c-98e8-ba7e54a437e0\") " pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.784680 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:18 crc kubenswrapper[4690]: W0320 13:36:18.808047 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod028e029f_b883_434c_98e8_ba7e54a437e0.slice/crio-ca39ebc556bf533ba6fa2e4bba2865b23980c2c7cf8e0861b61d08fd38237af4 WatchSource:0}: Error finding container ca39ebc556bf533ba6fa2e4bba2865b23980c2c7cf8e0861b61d08fd38237af4: Status 404 returned error can't find the container with id ca39ebc556bf533ba6fa2e4bba2865b23980c2c7cf8e0861b61d08fd38237af4 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.887193 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovnkube-controller/3.log" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.890753 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovn-acl-logging/0.log" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.891532 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-x2b7f_874238ac-6c4c-40c9-ad22-1bec31020fb6/ovn-controller/0.log" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892085 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" exitCode=0 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892124 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb" exitCode=0 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892123 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892202 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892228 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892244 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892138 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab" exitCode=0 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892277 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b" exitCode=0 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892292 4690 scope.go:117] "RemoveContainer" containerID="c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892313 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892356 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892296 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a" exitCode=0 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892434 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a" exitCode=0 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892462 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11" exitCode=143 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892476 4690 generic.go:334] "Generic (PLEG): container finished" podID="874238ac-6c4c-40c9-ad22-1bec31020fb6" containerID="705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7" exitCode=143 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892516 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892588 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892602 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892609 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892616 
4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892624 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892631 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892638 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892666 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892674 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892685 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892697 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892704 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892711 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892718 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892744 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892751 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892758 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892764 
4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892770 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892777 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892786 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892796 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892804 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892828 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892835 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892864 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892872 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892878 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892885 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892893 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892900 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892909 
4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-x2b7f" event={"ID":"874238ac-6c4c-40c9-ad22-1bec31020fb6","Type":"ContainerDied","Data":"a3bc228fd25f99f1051e24b25de3bd09886a3dabb501f4c7b0ed39f61e1151e4"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892919 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892947 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892954 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892961 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892967 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892974 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892981 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892987 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.892994 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.893001 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.895332 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"ca39ebc556bf533ba6fa2e4bba2865b23980c2c7cf8e0861b61d08fd38237af4"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.902298 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/2.log" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.903617 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/1.log" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.903674 
4690 generic.go:334] "Generic (PLEG): container finished" podID="d83a0d76-2d76-4202-a2f1-42b9ccb66802" containerID="a65e2d6bdf44779e477455b6e6eba4c7902d801329959f6e6286873f8adc0d79" exitCode=2 Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.903704 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgtf4" event={"ID":"d83a0d76-2d76-4202-a2f1-42b9ccb66802","Type":"ContainerDied","Data":"a65e2d6bdf44779e477455b6e6eba4c7902d801329959f6e6286873f8adc0d79"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.903727 4690 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42"} Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.904258 4690 scope.go:117] "RemoveContainer" containerID="a65e2d6bdf44779e477455b6e6eba4c7902d801329959f6e6286873f8adc0d79" Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.961109 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-x2b7f"] Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.969347 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-x2b7f"] Mar 20 13:36:18 crc kubenswrapper[4690]: I0320 13:36:18.979000 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.048909 4690 scope.go:117] "RemoveContainer" containerID="e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.068064 4690 scope.go:117] "RemoveContainer" containerID="e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.080508 4690 scope.go:117] "RemoveContainer" containerID="a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.092367 4690 scope.go:117] "RemoveContainer" containerID="a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.112795 4690 scope.go:117] "RemoveContainer" containerID="4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.129704 4690 scope.go:117] "RemoveContainer" containerID="7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.147664 4690 scope.go:117] "RemoveContainer" containerID="705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.171035 4690 scope.go:117] "RemoveContainer" containerID="2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.210946 4690 scope.go:117] "RemoveContainer" containerID="c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.211426 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": container with ID starting with c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab not found: ID does not exist" containerID="c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" Mar 20 13:36:19 crc 
kubenswrapper[4690]: I0320 13:36:19.211455 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} err="failed to get container status \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": rpc error: code = NotFound desc = could not find container \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": container with ID starting with c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.211477 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.211799 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": container with ID starting with adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9 not found: ID does not exist" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.211819 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} err="failed to get container status \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": rpc error: code = NotFound desc = could not find container \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": container with ID starting with adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.211831 4690 scope.go:117] "RemoveContainer" containerID="e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.212256 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": container with ID starting with e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb not found: ID does not exist" containerID="e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.212286 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} err="failed to get container status \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": rpc error: code = NotFound desc = could not find container \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": container with ID starting with e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.212302 4690 scope.go:117] "RemoveContainer" containerID="e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.212587 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": container with ID starting with 
e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab not found: ID does not exist" containerID="e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.212610 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} err="failed to get container status \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": rpc error: code = NotFound desc = could not find container \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": container with ID starting with e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.212627 4690 scope.go:117] "RemoveContainer" containerID="a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.212943 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": container with ID starting with a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b not found: ID does not exist" containerID="a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.212969 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} err="failed to get container status \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": rpc error: code = NotFound desc = could not find container \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": container with ID starting with a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.212986 4690 scope.go:117] "RemoveContainer" containerID="a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.213285 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": container with ID starting with a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a not found: ID does not exist" containerID="a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.213331 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} err="failed to get container status \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": rpc error: code = NotFound desc = could not find container \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": container with ID starting with a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.213362 4690 scope.go:117] "RemoveContainer" containerID="4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.213655 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc 
error: code = NotFound desc = could not find container \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": container with ID starting with 4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a not found: ID does not exist" containerID="4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.213687 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} err="failed to get container status \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": rpc error: code = NotFound desc = could not find container \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": container with ID starting with 4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.213708 4690 scope.go:117] "RemoveContainer" containerID="7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.214029 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": container with ID starting with 7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11 not found: ID does not exist" containerID="7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.214050 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} err="failed to get container status \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": rpc error: code = NotFound desc = could not find container \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": container with ID starting with 7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.214067 4690 scope.go:117] "RemoveContainer" containerID="705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.215566 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": container with ID starting with 705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7 not found: ID does not exist" containerID="705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.215599 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} err="failed to get container status \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": rpc error: code = NotFound desc = could not find container \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": container with ID starting with 705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.215617 4690 scope.go:117] "RemoveContainer" 
containerID="2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3" Mar 20 13:36:19 crc kubenswrapper[4690]: E0320 13:36:19.215934 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": container with ID starting with 2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3 not found: ID does not exist" containerID="2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.215956 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} err="failed to get container status \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": rpc error: code = NotFound desc = could not find container \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": container with ID starting with 2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.215972 4690 scope.go:117] "RemoveContainer" containerID="c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.216184 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} err="failed to get container status \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": rpc error: code = NotFound desc = could not find container \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": container with ID starting with c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.216209 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.216398 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} err="failed to get container status \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": rpc error: code = NotFound desc = could not find container \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": container with ID starting with adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.216418 4690 scope.go:117] "RemoveContainer" containerID="e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.216695 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} err="failed to get container status \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": rpc error: code = NotFound desc = could not find container \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": container with ID starting with e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.216716 4690 scope.go:117] "RemoveContainer" 
containerID="e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.216939 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} err="failed to get container status \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": rpc error: code = NotFound desc = could not find container \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": container with ID starting with e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.216961 4690 scope.go:117] "RemoveContainer" containerID="a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.217161 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} err="failed to get container status \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": rpc error: code = NotFound desc = could not find container \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": container with ID starting with a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.217184 4690 scope.go:117] "RemoveContainer" containerID="a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.217375 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} err="failed to get container status \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": rpc error: code = NotFound desc = could not find container \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": container with ID starting with a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.217397 4690 scope.go:117] "RemoveContainer" containerID="4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.217592 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} err="failed to get container status \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": rpc error: code = NotFound desc = could not find container \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": container with ID starting with 4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.217622 4690 scope.go:117] "RemoveContainer" containerID="7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.217897 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} err="failed to get container status \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": rpc error: code = NotFound desc = could not find 
container \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": container with ID starting with 7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.217920 4690 scope.go:117] "RemoveContainer" containerID="705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.218219 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} err="failed to get container status \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": rpc error: code = NotFound desc = could not find container \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": container with ID starting with 705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.218238 4690 scope.go:117] "RemoveContainer" containerID="2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.218445 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} err="failed to get container status \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": rpc error: code = NotFound desc = could not find container \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": container with ID starting with 2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.218479 4690 scope.go:117] "RemoveContainer" containerID="c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.243269 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} err="failed to get container status \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": rpc error: code = NotFound desc = could not find container \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": container with ID starting with c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.243327 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.243925 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} err="failed to get container status \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": rpc error: code = NotFound desc = could not find container \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": container with ID starting with adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.243954 4690 scope.go:117] "RemoveContainer" containerID="e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.244241 4690 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} err="failed to get container status \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": rpc error: code = NotFound desc = could not find container \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": container with ID starting with e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.244259 4690 scope.go:117] "RemoveContainer" containerID="e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.244485 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} err="failed to get container status \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": rpc error: code = NotFound desc = could not find container \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": container with ID starting with e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.244505 4690 scope.go:117] "RemoveContainer" containerID="a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.244925 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} err="failed to get container status \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": rpc error: code = NotFound desc = could not find container \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": container with ID starting with a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.244947 4690 scope.go:117] "RemoveContainer" containerID="a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.245200 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} err="failed to get container status \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": rpc error: code = NotFound desc = could not find container \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": container with ID starting with a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.245232 4690 scope.go:117] "RemoveContainer" containerID="4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.245601 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} err="failed to get container status \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": rpc error: code = NotFound desc = could not find container \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": container with ID starting with 
4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.245620 4690 scope.go:117] "RemoveContainer" containerID="7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.245889 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} err="failed to get container status \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": rpc error: code = NotFound desc = could not find container \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": container with ID starting with 7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.245922 4690 scope.go:117] "RemoveContainer" containerID="705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.246465 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} err="failed to get container status \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": rpc error: code = NotFound desc = could not find container \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": container with ID starting with 705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.246486 4690 scope.go:117] "RemoveContainer" containerID="2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.246757 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} err="failed to get container status \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": rpc error: code = NotFound desc = could not find container \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": container with ID starting with 2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.246797 4690 scope.go:117] "RemoveContainer" containerID="c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.247115 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} err="failed to get container status \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": rpc error: code = NotFound desc = could not find container \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": container with ID starting with c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.247135 4690 scope.go:117] "RemoveContainer" containerID="adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.247374 4690 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9"} err="failed to get container status \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": rpc error: code = NotFound desc = could not find container \"adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9\": container with ID starting with adf4791b054e8fa3d8e6469812cc3cbe7be8c490dee13c0bbc1698edcf6330b9 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.247393 4690 scope.go:117] "RemoveContainer" containerID="e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.247633 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb"} err="failed to get container status \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": rpc error: code = NotFound desc = could not find container \"e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb\": container with ID starting with e2865d02c67e467cf5db9f31d511c4f0b1b55770ce8b0a39c07c1c53e0bdbebb not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.247652 4690 scope.go:117] "RemoveContainer" containerID="e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.247910 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab"} err="failed to get container status \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": rpc error: code = NotFound desc = could not find container \"e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab\": container with ID starting with e80a4402c71a271cc6860758068c0d529ab32da8b5f05727c12a7907ccb875ab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.247935 4690 scope.go:117] "RemoveContainer" containerID="a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.248189 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b"} err="failed to get container status \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": rpc error: code = NotFound desc = could not find container \"a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b\": container with ID starting with a4d7d7ba6ab1de531f414c9caccb20bf794afef949cf295d2bab12c296dc272b not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.248209 4690 scope.go:117] "RemoveContainer" containerID="a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.248454 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a"} err="failed to get container status \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": rpc error: code = NotFound desc = could not find container \"a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a\": container with ID starting with a2c79f9019950b4375ea19fc1d48ad1e9b536cadb39053a10e13cf7d3b6d2d3a not found: ID does not exist" Mar 
20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.248481 4690 scope.go:117] "RemoveContainer" containerID="4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.248750 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a"} err="failed to get container status \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": rpc error: code = NotFound desc = could not find container \"4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a\": container with ID starting with 4ccb0c310f027e4cb37f58cb81579203a48009cdcae22a65d183d962fc39232a not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.248774 4690 scope.go:117] "RemoveContainer" containerID="7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.249096 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11"} err="failed to get container status \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": rpc error: code = NotFound desc = could not find container \"7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11\": container with ID starting with 7437ce2e60cfe52e1a5ce4850730eb6326923218ef395dd394184c6e72aeeb11 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.249117 4690 scope.go:117] "RemoveContainer" containerID="705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.249403 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7"} err="failed to get container status \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": rpc error: code = NotFound desc = could not find container \"705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7\": container with ID starting with 705f93dc977d53dc792d2a39ac3f5029aa0ee0641e68eb8be7d368c56d2058b7 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.249423 4690 scope.go:117] "RemoveContainer" containerID="2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.249701 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3"} err="failed to get container status \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": rpc error: code = NotFound desc = could not find container \"2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3\": container with ID starting with 2e548990806c54c6b7e511fa0c80759d17de3f14a510c9c8d75da800a57535d3 not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.249742 4690 scope.go:117] "RemoveContainer" containerID="c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.250008 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab"} err="failed to get container status 
\"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": rpc error: code = NotFound desc = could not find container \"c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab\": container with ID starting with c7cfa83366818be709f126f6b9eaff1769012040e8da95239b052ab061c64dab not found: ID does not exist" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.910030 4690 generic.go:334] "Generic (PLEG): container finished" podID="028e029f-b883-434c-98e8-ba7e54a437e0" containerID="304e8376567c8ff5ae6ae565a8033257418b37dbb255e4e18bf222cd6a9966d7" exitCode=0 Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.910109 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerDied","Data":"304e8376567c8ff5ae6ae565a8033257418b37dbb255e4e18bf222cd6a9966d7"} Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.912430 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/2.log" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.913033 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/1.log" Mar 20 13:36:19 crc kubenswrapper[4690]: I0320 13:36:19.913137 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-pgtf4" event={"ID":"d83a0d76-2d76-4202-a2f1-42b9ccb66802","Type":"ContainerStarted","Data":"8cd90ff7da7678a93fcf523c5a0188a136aa330591ffa250a4e2c961fd76fd8e"} Mar 20 13:36:20 crc kubenswrapper[4690]: I0320 13:36:20.429725 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="874238ac-6c4c-40c9-ad22-1bec31020fb6" path="/var/lib/kubelet/pods/874238ac-6c4c-40c9-ad22-1bec31020fb6/volumes" Mar 20 13:36:20 crc kubenswrapper[4690]: I0320 13:36:20.926642 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"43e7ecc1c69ac24a3d0d0103f34f862816e6b2c132c8a7eef4e974afb72e114c"} Mar 20 13:36:20 crc kubenswrapper[4690]: I0320 13:36:20.927144 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"de108e86f09e96cea87a329cc481c0367b3a1b5c11f739d556f8d45d96d15ef2"} Mar 20 13:36:20 crc kubenswrapper[4690]: I0320 13:36:20.927167 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"71105f45852f7ede76283820039a094916a4eff98385049ec8a76eba62a21d3f"} Mar 20 13:36:20 crc kubenswrapper[4690]: I0320 13:36:20.927186 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"b54be795a0e4d351232214b55c7c5458262860078c964ea08f5855ed2f620963"} Mar 20 13:36:20 crc kubenswrapper[4690]: I0320 13:36:20.927206 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"0af8c41e9d8987951b7cb5c9a799217b29c34eed7005e81c97e664b0191b176a"} Mar 20 13:36:20 crc kubenswrapper[4690]: I0320 13:36:20.927223 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"5f76957ef6c03b86910a70ed7128446f4121a2afccffe21884ffaeb64ba22429"} Mar 20 13:36:23 crc kubenswrapper[4690]: I0320 13:36:23.956485 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"fddc93b0bd462c5b666fe4677d6ab7d7f9805b9e0644ebe62041c87637e25d63"} Mar 20 13:36:25 crc kubenswrapper[4690]: I0320 13:36:25.978015 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" event={"ID":"028e029f-b883-434c-98e8-ba7e54a437e0","Type":"ContainerStarted","Data":"c72019a7286a1b34a9f1412a5acfdd105055d737a3a773e8df3b2d2f7610e881"} Mar 20 13:36:25 crc kubenswrapper[4690]: I0320 13:36:25.978668 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:25 crc kubenswrapper[4690]: I0320 13:36:25.978692 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:26 crc kubenswrapper[4690]: I0320 13:36:26.028657 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" podStartSLOduration=8.028634278 podStartE2EDuration="8.028634278s" podCreationTimestamp="2026-03-20 13:36:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:36:26.020060842 +0000 UTC m=+832.309660825" watchObservedRunningTime="2026-03-20 13:36:26.028634278 +0000 UTC m=+832.318234261" Mar 20 13:36:26 crc kubenswrapper[4690]: I0320 13:36:26.034161 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:26 crc kubenswrapper[4690]: I0320 13:36:26.983730 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:27 crc kubenswrapper[4690]: I0320 13:36:27.021188 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:42 crc kubenswrapper[4690]: I0320 13:36:42.833372 4690 scope.go:117] "RemoveContainer" containerID="1e46750da958eb5d82f18e6cfdf3aa39bfb3e63d4ff0d14597b110eb5d81113f" Mar 20 13:36:42 crc kubenswrapper[4690]: I0320 13:36:42.877440 4690 scope.go:117] "RemoveContainer" containerID="95875c7da51b76da26cf600f1c120a6b555d6b8d36477259a45a3bba401b6a42" Mar 20 13:36:43 crc kubenswrapper[4690]: I0320 13:36:43.100790 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-pgtf4_d83a0d76-2d76-4202-a2f1-42b9ccb66802/kube-multus/2.log" Mar 20 13:36:48 crc kubenswrapper[4690]: I0320 13:36:48.826600 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-vgb95" Mar 20 13:36:57 crc kubenswrapper[4690]: I0320 13:36:57.963106 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb"] Mar 20 13:36:57 crc kubenswrapper[4690]: I0320 13:36:57.964826 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:57 crc kubenswrapper[4690]: I0320 13:36:57.967545 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:57.989974 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb"] Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.167118 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-bundle\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.167189 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82hpv\" (UniqueName: \"kubernetes.io/projected/8ce85d01-b103-4ada-930c-10ca16d9801d-kube-api-access-82hpv\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.167291 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-util\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.267974 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-util\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.268088 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-bundle\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.268148 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82hpv\" (UniqueName: \"kubernetes.io/projected/8ce85d01-b103-4ada-930c-10ca16d9801d-kube-api-access-82hpv\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.268896 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-util\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.268929 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-bundle\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.290714 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82hpv\" (UniqueName: \"kubernetes.io/projected/8ce85d01-b103-4ada-930c-10ca16d9801d-kube-api-access-82hpv\") pod \"1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.336726 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:36:58 crc kubenswrapper[4690]: I0320 13:36:58.789151 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb"] Mar 20 13:36:58 crc kubenswrapper[4690]: W0320 13:36:58.798300 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ce85d01_b103_4ada_930c_10ca16d9801d.slice/crio-43c763816c5451d9d9a19b197d64c6c52698fa09c6bf41598c57277949fa69d7 WatchSource:0}: Error finding container 43c763816c5451d9d9a19b197d64c6c52698fa09c6bf41598c57277949fa69d7: Status 404 returned error can't find the container with id 43c763816c5451d9d9a19b197d64c6c52698fa09c6bf41598c57277949fa69d7 Mar 20 13:36:59 crc kubenswrapper[4690]: I0320 13:36:59.204465 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" event={"ID":"8ce85d01-b103-4ada-930c-10ca16d9801d","Type":"ContainerStarted","Data":"fa0887d55c0c084b34ffcba1b8c12a94e1392c198eb9aebcb1afadb5b6e39351"} Mar 20 13:36:59 crc kubenswrapper[4690]: I0320 13:36:59.204499 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" event={"ID":"8ce85d01-b103-4ada-930c-10ca16d9801d","Type":"ContainerStarted","Data":"43c763816c5451d9d9a19b197d64c6c52698fa09c6bf41598c57277949fa69d7"} Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.213306 4690 generic.go:334] "Generic (PLEG): container finished" podID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerID="fa0887d55c0c084b34ffcba1b8c12a94e1392c198eb9aebcb1afadb5b6e39351" exitCode=0 Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.213515 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" event={"ID":"8ce85d01-b103-4ada-930c-10ca16d9801d","Type":"ContainerDied","Data":"fa0887d55c0c084b34ffcba1b8c12a94e1392c198eb9aebcb1afadb5b6e39351"} Mar 20 13:37:00 crc 
kubenswrapper[4690]: I0320 13:37:00.323436 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fz2qz"] Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.330380 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.338653 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fz2qz"] Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.495706 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7d9lx\" (UniqueName: \"kubernetes.io/projected/eca51b99-1947-44d0-a86c-d8f0302337ba-kube-api-access-7d9lx\") pod \"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.495946 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-utilities\") pod \"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.496016 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-catalog-content\") pod \"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.596467 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7d9lx\" (UniqueName: \"kubernetes.io/projected/eca51b99-1947-44d0-a86c-d8f0302337ba-kube-api-access-7d9lx\") pod \"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.596575 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-utilities\") pod \"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.596616 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-catalog-content\") pod \"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.597285 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-utilities\") pod \"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.597355 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-catalog-content\") pod 
\"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.619279 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7d9lx\" (UniqueName: \"kubernetes.io/projected/eca51b99-1947-44d0-a86c-d8f0302337ba-kube-api-access-7d9lx\") pod \"redhat-operators-fz2qz\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.666524 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:00 crc kubenswrapper[4690]: I0320 13:37:00.850427 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fz2qz"] Mar 20 13:37:00 crc kubenswrapper[4690]: W0320 13:37:00.861060 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeca51b99_1947_44d0_a86c_d8f0302337ba.slice/crio-997675ace485b9c59b3c76c2e7f78b4a65a9bfaf35eaada4df8504529ea9a98b WatchSource:0}: Error finding container 997675ace485b9c59b3c76c2e7f78b4a65a9bfaf35eaada4df8504529ea9a98b: Status 404 returned error can't find the container with id 997675ace485b9c59b3c76c2e7f78b4a65a9bfaf35eaada4df8504529ea9a98b Mar 20 13:37:01 crc kubenswrapper[4690]: I0320 13:37:01.221461 4690 generic.go:334] "Generic (PLEG): container finished" podID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerID="d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237" exitCode=0 Mar 20 13:37:01 crc kubenswrapper[4690]: I0320 13:37:01.221643 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz2qz" event={"ID":"eca51b99-1947-44d0-a86c-d8f0302337ba","Type":"ContainerDied","Data":"d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237"} Mar 20 13:37:01 crc kubenswrapper[4690]: I0320 13:37:01.221741 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz2qz" event={"ID":"eca51b99-1947-44d0-a86c-d8f0302337ba","Type":"ContainerStarted","Data":"997675ace485b9c59b3c76c2e7f78b4a65a9bfaf35eaada4df8504529ea9a98b"} Mar 20 13:37:01 crc kubenswrapper[4690]: I0320 13:37:01.225040 4690 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 13:37:03 crc kubenswrapper[4690]: I0320 13:37:03.238606 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz2qz" event={"ID":"eca51b99-1947-44d0-a86c-d8f0302337ba","Type":"ContainerStarted","Data":"7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e"} Mar 20 13:37:03 crc kubenswrapper[4690]: I0320 13:37:03.241984 4690 generic.go:334] "Generic (PLEG): container finished" podID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerID="fca51fa5ed9c9c1fdf01171fc4fb44d20708d12693c8d7c8f3f8c0f1d7d5ab54" exitCode=0 Mar 20 13:37:03 crc kubenswrapper[4690]: I0320 13:37:03.242030 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" event={"ID":"8ce85d01-b103-4ada-930c-10ca16d9801d","Type":"ContainerDied","Data":"fca51fa5ed9c9c1fdf01171fc4fb44d20708d12693c8d7c8f3f8c0f1d7d5ab54"} Mar 20 13:37:03 crc kubenswrapper[4690]: I0320 13:37:03.829640 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:37:03 crc kubenswrapper[4690]: I0320 13:37:03.830108 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:37:04 crc kubenswrapper[4690]: I0320 13:37:04.254309 4690 generic.go:334] "Generic (PLEG): container finished" podID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerID="844426be6ef867cd5371cab5e5e770ff582bbc876a1580b05f2be3c4a7cb8921" exitCode=0 Mar 20 13:37:04 crc kubenswrapper[4690]: I0320 13:37:04.254385 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" event={"ID":"8ce85d01-b103-4ada-930c-10ca16d9801d","Type":"ContainerDied","Data":"844426be6ef867cd5371cab5e5e770ff582bbc876a1580b05f2be3c4a7cb8921"} Mar 20 13:37:04 crc kubenswrapper[4690]: I0320 13:37:04.256923 4690 generic.go:334] "Generic (PLEG): container finished" podID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerID="7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e" exitCode=0 Mar 20 13:37:04 crc kubenswrapper[4690]: I0320 13:37:04.256985 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz2qz" event={"ID":"eca51b99-1947-44d0-a86c-d8f0302337ba","Type":"ContainerDied","Data":"7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e"} Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.266962 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz2qz" event={"ID":"eca51b99-1947-44d0-a86c-d8f0302337ba","Type":"ContainerStarted","Data":"c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51"} Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.283047 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fz2qz" podStartSLOduration=1.581013972 podStartE2EDuration="5.283028621s" podCreationTimestamp="2026-03-20 13:37:00 +0000 UTC" firstStartedPulling="2026-03-20 13:37:01.224741692 +0000 UTC m=+867.514341645" lastFinishedPulling="2026-03-20 13:37:04.926756321 +0000 UTC m=+871.216356294" observedRunningTime="2026-03-20 13:37:05.281500968 +0000 UTC m=+871.571100921" watchObservedRunningTime="2026-03-20 13:37:05.283028621 +0000 UTC m=+871.572628564" Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.554140 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.659771 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-util\") pod \"8ce85d01-b103-4ada-930c-10ca16d9801d\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.659905 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-82hpv\" (UniqueName: \"kubernetes.io/projected/8ce85d01-b103-4ada-930c-10ca16d9801d-kube-api-access-82hpv\") pod \"8ce85d01-b103-4ada-930c-10ca16d9801d\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.660011 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-bundle\") pod \"8ce85d01-b103-4ada-930c-10ca16d9801d\" (UID: \"8ce85d01-b103-4ada-930c-10ca16d9801d\") " Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.660606 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-bundle" (OuterVolumeSpecName: "bundle") pod "8ce85d01-b103-4ada-930c-10ca16d9801d" (UID: "8ce85d01-b103-4ada-930c-10ca16d9801d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.670383 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ce85d01-b103-4ada-930c-10ca16d9801d-kube-api-access-82hpv" (OuterVolumeSpecName: "kube-api-access-82hpv") pod "8ce85d01-b103-4ada-930c-10ca16d9801d" (UID: "8ce85d01-b103-4ada-930c-10ca16d9801d"). InnerVolumeSpecName "kube-api-access-82hpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.675976 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-util" (OuterVolumeSpecName: "util") pod "8ce85d01-b103-4ada-930c-10ca16d9801d" (UID: "8ce85d01-b103-4ada-930c-10ca16d9801d"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.762174 4690 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-util\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.762246 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-82hpv\" (UniqueName: \"kubernetes.io/projected/8ce85d01-b103-4ada-930c-10ca16d9801d-kube-api-access-82hpv\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:05 crc kubenswrapper[4690]: I0320 13:37:05.762277 4690 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/8ce85d01-b103-4ada-930c-10ca16d9801d-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:06 crc kubenswrapper[4690]: I0320 13:37:06.277591 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" event={"ID":"8ce85d01-b103-4ada-930c-10ca16d9801d","Type":"ContainerDied","Data":"43c763816c5451d9d9a19b197d64c6c52698fa09c6bf41598c57277949fa69d7"} Mar 20 13:37:06 crc kubenswrapper[4690]: I0320 13:37:06.277639 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="43c763816c5451d9d9a19b197d64c6c52698fa09c6bf41598c57277949fa69d7" Mar 20 13:37:06 crc kubenswrapper[4690]: I0320 13:37:06.277753 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.250157 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p"] Mar 20 13:37:08 crc kubenswrapper[4690]: E0320 13:37:08.250715 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerName="pull" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.250733 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerName="pull" Mar 20 13:37:08 crc kubenswrapper[4690]: E0320 13:37:08.250764 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerName="extract" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.250777 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerName="extract" Mar 20 13:37:08 crc kubenswrapper[4690]: E0320 13:37:08.250790 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerName="util" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.250804 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerName="util" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.250947 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ce85d01-b103-4ada-930c-10ca16d9801d" containerName="extract" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.251407 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.254737 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.254822 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-24lmk" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.254988 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.261722 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p"] Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.299745 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxp5r\" (UniqueName: \"kubernetes.io/projected/244f1908-ebf4-4923-afc9-30bab5acf6bd-kube-api-access-bxp5r\") pod \"nmstate-operator-796d4cfff4-v4l8p\" (UID: \"244f1908-ebf4-4923-afc9-30bab5acf6bd\") " pod="openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.400723 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxp5r\" (UniqueName: \"kubernetes.io/projected/244f1908-ebf4-4923-afc9-30bab5acf6bd-kube-api-access-bxp5r\") pod \"nmstate-operator-796d4cfff4-v4l8p\" (UID: \"244f1908-ebf4-4923-afc9-30bab5acf6bd\") " pod="openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.425622 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxp5r\" (UniqueName: \"kubernetes.io/projected/244f1908-ebf4-4923-afc9-30bab5acf6bd-kube-api-access-bxp5r\") pod \"nmstate-operator-796d4cfff4-v4l8p\" (UID: \"244f1908-ebf4-4923-afc9-30bab5acf6bd\") " pod="openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p" Mar 20 13:37:08 crc kubenswrapper[4690]: I0320 13:37:08.567918 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p" Mar 20 13:37:09 crc kubenswrapper[4690]: I0320 13:37:09.016828 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p"] Mar 20 13:37:09 crc kubenswrapper[4690]: I0320 13:37:09.298322 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p" event={"ID":"244f1908-ebf4-4923-afc9-30bab5acf6bd","Type":"ContainerStarted","Data":"3979ffa769998b7ae623ec33f7263a2bc67b744818a0dd34edfc0aafbe345745"} Mar 20 13:37:10 crc kubenswrapper[4690]: I0320 13:37:10.668406 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:10 crc kubenswrapper[4690]: I0320 13:37:10.668651 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:11 crc kubenswrapper[4690]: I0320 13:37:11.733542 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-fz2qz" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="registry-server" probeResult="failure" output=< Mar 20 13:37:11 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Mar 20 13:37:11 crc kubenswrapper[4690]: > Mar 20 13:37:12 crc kubenswrapper[4690]: I0320 13:37:12.329262 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p" event={"ID":"244f1908-ebf4-4923-afc9-30bab5acf6bd","Type":"ContainerStarted","Data":"e0ae464c972e5d82139a3b6116f43fcb37cb772b88c315c57a66ac6e2023afd3"} Mar 20 13:37:12 crc kubenswrapper[4690]: I0320 13:37:12.358225 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-796d4cfff4-v4l8p" podStartSLOduration=1.5560937940000001 podStartE2EDuration="4.358203311s" podCreationTimestamp="2026-03-20 13:37:08 +0000 UTC" firstStartedPulling="2026-03-20 13:37:09.027750114 +0000 UTC m=+875.317350067" lastFinishedPulling="2026-03-20 13:37:11.829859641 +0000 UTC m=+878.119459584" observedRunningTime="2026-03-20 13:37:12.35603285 +0000 UTC m=+878.645632803" watchObservedRunningTime="2026-03-20 13:37:12.358203311 +0000 UTC m=+878.647803274" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.150076 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.151675 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.153547 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-qx8sc" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.165024 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.192486 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-fbbgx"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.193477 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.204484 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f558f5558-gss2p"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.205430 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.211344 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.213923 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f558f5558-gss2p"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.304403 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.305182 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.307263 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.307459 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.308038 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-qc7s6" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.319624 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.329460 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-tls-key-pair\") pod \"nmstate-webhook-5f558f5558-gss2p\" (UID: \"693ab8f6-3c6b-4aea-a488-6b9b17bcd249\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.329501 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-dbus-socket\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.329525 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-nmstate-lock\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.329542 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdzrw\" (UniqueName: \"kubernetes.io/projected/f27e925b-b583-4ebb-9c76-8b94e717572b-kube-api-access-kdzrw\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 
13:37:18.329586 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm6fj\" (UniqueName: \"kubernetes.io/projected/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-kube-api-access-lm6fj\") pod \"nmstate-webhook-5f558f5558-gss2p\" (UID: \"693ab8f6-3c6b-4aea-a488-6b9b17bcd249\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.329636 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b29m2\" (UniqueName: \"kubernetes.io/projected/7b110386-4970-4b99-ab60-783130776002-kube-api-access-b29m2\") pod \"nmstate-metrics-9b8c8685d-njk8k\" (UID: \"7b110386-4970-4b99-ab60-783130776002\") " pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.329965 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-ovs-socket\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.431819 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-tls-key-pair\") pod \"nmstate-webhook-5f558f5558-gss2p\" (UID: \"693ab8f6-3c6b-4aea-a488-6b9b17bcd249\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.431889 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-dbus-socket\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.431920 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-nmstate-lock\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.431941 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdzrw\" (UniqueName: \"kubernetes.io/projected/f27e925b-b583-4ebb-9c76-8b94e717572b-kube-api-access-kdzrw\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.431964 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm6fj\" (UniqueName: \"kubernetes.io/projected/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-kube-api-access-lm6fj\") pod \"nmstate-webhook-5f558f5558-gss2p\" (UID: \"693ab8f6-3c6b-4aea-a488-6b9b17bcd249\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.431989 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/96ef8c08-d917-4818-aa0b-a8f40e03d5af-nginx-conf\") pod \"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") 
" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: E0320 13:37:18.432018 4690 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Mar 20 13:37:18 crc kubenswrapper[4690]: E0320 13:37:18.432103 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-tls-key-pair podName:693ab8f6-3c6b-4aea-a488-6b9b17bcd249 nodeName:}" failed. No retries permitted until 2026-03-20 13:37:18.932079802 +0000 UTC m=+885.221679825 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-tls-key-pair") pod "nmstate-webhook-5f558f5558-gss2p" (UID: "693ab8f6-3c6b-4aea-a488-6b9b17bcd249") : secret "openshift-nmstate-webhook" not found Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.432215 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-dbus-socket\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.432033 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b29m2\" (UniqueName: \"kubernetes.io/projected/7b110386-4970-4b99-ab60-783130776002-kube-api-access-b29m2\") pod \"nmstate-metrics-9b8c8685d-njk8k\" (UID: \"7b110386-4970-4b99-ab60-783130776002\") " pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.432313 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-ovs-socket\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.432353 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-nmstate-lock\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.432396 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/f27e925b-b583-4ebb-9c76-8b94e717572b-ovs-socket\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.432445 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xs945\" (UniqueName: \"kubernetes.io/projected/96ef8c08-d917-4818-aa0b-a8f40e03d5af-kube-api-access-xs945\") pod \"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.432526 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/96ef8c08-d917-4818-aa0b-a8f40e03d5af-plugin-serving-cert\") pod 
\"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.456443 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdzrw\" (UniqueName: \"kubernetes.io/projected/f27e925b-b583-4ebb-9c76-8b94e717572b-kube-api-access-kdzrw\") pod \"nmstate-handler-fbbgx\" (UID: \"f27e925b-b583-4ebb-9c76-8b94e717572b\") " pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.456710 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm6fj\" (UniqueName: \"kubernetes.io/projected/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-kube-api-access-lm6fj\") pod \"nmstate-webhook-5f558f5558-gss2p\" (UID: \"693ab8f6-3c6b-4aea-a488-6b9b17bcd249\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.470712 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b29m2\" (UniqueName: \"kubernetes.io/projected/7b110386-4970-4b99-ab60-783130776002-kube-api-access-b29m2\") pod \"nmstate-metrics-9b8c8685d-njk8k\" (UID: \"7b110386-4970-4b99-ab60-783130776002\") " pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.477832 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.491624 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-6fd47b444f-wv59m"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.492458 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.501439 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6fd47b444f-wv59m"] Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.514248 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.533512 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xs945\" (UniqueName: \"kubernetes.io/projected/96ef8c08-d917-4818-aa0b-a8f40e03d5af-kube-api-access-xs945\") pod \"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.533820 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/96ef8c08-d917-4818-aa0b-a8f40e03d5af-plugin-serving-cert\") pod \"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.533914 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/96ef8c08-d917-4818-aa0b-a8f40e03d5af-nginx-conf\") pod \"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.535222 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/96ef8c08-d917-4818-aa0b-a8f40e03d5af-nginx-conf\") pod \"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.542545 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/96ef8c08-d917-4818-aa0b-a8f40e03d5af-plugin-serving-cert\") pod \"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.558794 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xs945\" (UniqueName: \"kubernetes.io/projected/96ef8c08-d917-4818-aa0b-a8f40e03d5af-kube-api-access-xs945\") pod \"nmstate-console-plugin-86f58fcf4-8qxkm\" (UID: \"96ef8c08-d917-4818-aa0b-a8f40e03d5af\") " pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.622646 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.634568 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-config\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.634630 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzrmh\" (UniqueName: \"kubernetes.io/projected/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-kube-api-access-nzrmh\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.634677 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-oauth-serving-cert\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.634703 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-service-ca\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.634749 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-trusted-ca-bundle\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.634786 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-oauth-config\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.634821 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-serving-cert\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.735624 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-trusted-ca-bundle\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.735690 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" 
(UniqueName: \"kubernetes.io/secret/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-oauth-config\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.735722 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-serving-cert\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.735767 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-config\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.735798 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzrmh\" (UniqueName: \"kubernetes.io/projected/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-kube-api-access-nzrmh\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.735836 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-oauth-serving-cert\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.735877 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-service-ca\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.737093 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-trusted-ca-bundle\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.737146 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-oauth-serving-cert\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.737147 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-service-ca\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.737469 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: 
\"kubernetes.io/configmap/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-config\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.740573 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-serving-cert\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.741656 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-console-oauth-config\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.757787 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzrmh\" (UniqueName: \"kubernetes.io/projected/24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4-kube-api-access-nzrmh\") pod \"console-6fd47b444f-wv59m\" (UID: \"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4\") " pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.796372 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm"] Mar 20 13:37:18 crc kubenswrapper[4690]: W0320 13:37:18.804043 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod96ef8c08_d917_4818_aa0b_a8f40e03d5af.slice/crio-5c38a0eb8ea549f255027d6cfa15e0434bf9a68052c71f0a682c220a6320bd4e WatchSource:0}: Error finding container 5c38a0eb8ea549f255027d6cfa15e0434bf9a68052c71f0a682c220a6320bd4e: Status 404 returned error can't find the container with id 5c38a0eb8ea549f255027d6cfa15e0434bf9a68052c71f0a682c220a6320bd4e Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.870901 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.901476 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k"] Mar 20 13:37:18 crc kubenswrapper[4690]: W0320 13:37:18.911784 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b110386_4970_4b99_ab60_783130776002.slice/crio-767f7881dadc02cf9d6b3dee538c2035f49b5f03bc3f737ce3290ca8aba32a6e WatchSource:0}: Error finding container 767f7881dadc02cf9d6b3dee538c2035f49b5f03bc3f737ce3290ca8aba32a6e: Status 404 returned error can't find the container with id 767f7881dadc02cf9d6b3dee538c2035f49b5f03bc3f737ce3290ca8aba32a6e Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.938037 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-tls-key-pair\") pod \"nmstate-webhook-5f558f5558-gss2p\" (UID: \"693ab8f6-3c6b-4aea-a488-6b9b17bcd249\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:18 crc kubenswrapper[4690]: I0320 13:37:18.942953 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/693ab8f6-3c6b-4aea-a488-6b9b17bcd249-tls-key-pair\") pod \"nmstate-webhook-5f558f5558-gss2p\" (UID: \"693ab8f6-3c6b-4aea-a488-6b9b17bcd249\") " pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:19 crc kubenswrapper[4690]: I0320 13:37:19.083629 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-6fd47b444f-wv59m"] Mar 20 13:37:19 crc kubenswrapper[4690]: W0320 13:37:19.089940 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24b6a2f3_4e48_4e0c_bd1a_b23ad2fd12c4.slice/crio-841726c830b14fd1b8ccb7ad97bc54a6fcb707eb69916345bf2eadb9c6aabb2c WatchSource:0}: Error finding container 841726c830b14fd1b8ccb7ad97bc54a6fcb707eb69916345bf2eadb9c6aabb2c: Status 404 returned error can't find the container with id 841726c830b14fd1b8ccb7ad97bc54a6fcb707eb69916345bf2eadb9c6aabb2c Mar 20 13:37:19 crc kubenswrapper[4690]: I0320 13:37:19.125692 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:19 crc kubenswrapper[4690]: I0320 13:37:19.378563 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-fbbgx" event={"ID":"f27e925b-b583-4ebb-9c76-8b94e717572b","Type":"ContainerStarted","Data":"c33f693a53638d328a8067d877d24693959df0be5020643fdc6bb5a86efc335a"} Mar 20 13:37:19 crc kubenswrapper[4690]: I0320 13:37:19.379777 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" event={"ID":"96ef8c08-d917-4818-aa0b-a8f40e03d5af","Type":"ContainerStarted","Data":"5c38a0eb8ea549f255027d6cfa15e0434bf9a68052c71f0a682c220a6320bd4e"} Mar 20 13:37:19 crc kubenswrapper[4690]: I0320 13:37:19.381266 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" event={"ID":"7b110386-4970-4b99-ab60-783130776002","Type":"ContainerStarted","Data":"767f7881dadc02cf9d6b3dee538c2035f49b5f03bc3f737ce3290ca8aba32a6e"} Mar 20 13:37:19 crc kubenswrapper[4690]: I0320 13:37:19.382479 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6fd47b444f-wv59m" event={"ID":"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4","Type":"ContainerStarted","Data":"841726c830b14fd1b8ccb7ad97bc54a6fcb707eb69916345bf2eadb9c6aabb2c"} Mar 20 13:37:19 crc kubenswrapper[4690]: I0320 13:37:19.604887 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f558f5558-gss2p"] Mar 20 13:37:19 crc kubenswrapper[4690]: W0320 13:37:19.612290 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod693ab8f6_3c6b_4aea_a488_6b9b17bcd249.slice/crio-858a035a88dca26db75d168ba950ba0aca23be64d0d6e2cabe9ea07dfd1b570c WatchSource:0}: Error finding container 858a035a88dca26db75d168ba950ba0aca23be64d0d6e2cabe9ea07dfd1b570c: Status 404 returned error can't find the container with id 858a035a88dca26db75d168ba950ba0aca23be64d0d6e2cabe9ea07dfd1b570c Mar 20 13:37:20 crc kubenswrapper[4690]: I0320 13:37:20.390391 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" event={"ID":"693ab8f6-3c6b-4aea-a488-6b9b17bcd249","Type":"ContainerStarted","Data":"858a035a88dca26db75d168ba950ba0aca23be64d0d6e2cabe9ea07dfd1b570c"} Mar 20 13:37:20 crc kubenswrapper[4690]: I0320 13:37:20.393588 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-6fd47b444f-wv59m" event={"ID":"24b6a2f3-4e48-4e0c-bd1a-b23ad2fd12c4","Type":"ContainerStarted","Data":"3a5906d10aa3fe555dec6617d20a2201fc97dffba459b6553703d080ad3eaaa0"} Mar 20 13:37:20 crc kubenswrapper[4690]: I0320 13:37:20.420808 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-6fd47b444f-wv59m" podStartSLOduration=2.420791378 podStartE2EDuration="2.420791378s" podCreationTimestamp="2026-03-20 13:37:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:37:20.417367132 +0000 UTC m=+886.706967105" watchObservedRunningTime="2026-03-20 13:37:20.420791378 +0000 UTC m=+886.710391331" Mar 20 13:37:20 crc kubenswrapper[4690]: I0320 13:37:20.713440 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:20 crc kubenswrapper[4690]: I0320 
13:37:20.768832 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:20 crc kubenswrapper[4690]: I0320 13:37:20.949964 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fz2qz"] Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.406623 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" event={"ID":"693ab8f6-3c6b-4aea-a488-6b9b17bcd249","Type":"ContainerStarted","Data":"1cb5fc053edab5b73fc2bd202e8829c0fc2142dc1a1f44f29b9bf7959dc42a8e"} Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.407289 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.410165 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-fbbgx" event={"ID":"f27e925b-b583-4ebb-9c76-8b94e717572b","Type":"ContainerStarted","Data":"a77133332060e2b0447934756f80092b2164991f8bdfd125e5956b614e73a259"} Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.410242 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.411911 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" event={"ID":"96ef8c08-d917-4818-aa0b-a8f40e03d5af","Type":"ContainerStarted","Data":"7fd49010159b9fe18efae031689f58018f06cd0e97f468f1fd77d58e69327459"} Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.413234 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fz2qz" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="registry-server" containerID="cri-o://c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51" gracePeriod=2 Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.413527 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" event={"ID":"7b110386-4970-4b99-ab60-783130776002","Type":"ContainerStarted","Data":"5c6e43e4ef0b9db5474b7fd8ef366c8712b73f77691389e48b051afb6e79e3ca"} Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.431832 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" podStartSLOduration=2.121431992 podStartE2EDuration="4.431810562s" podCreationTimestamp="2026-03-20 13:37:18 +0000 UTC" firstStartedPulling="2026-03-20 13:37:19.615339119 +0000 UTC m=+885.904939082" lastFinishedPulling="2026-03-20 13:37:21.925717679 +0000 UTC m=+888.215317652" observedRunningTime="2026-03-20 13:37:22.430884976 +0000 UTC m=+888.720484919" watchObservedRunningTime="2026-03-20 13:37:22.431810562 +0000 UTC m=+888.721410525" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.459900 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-86f58fcf4-8qxkm" podStartSLOduration=1.374534294 podStartE2EDuration="4.459874643s" podCreationTimestamp="2026-03-20 13:37:18 +0000 UTC" firstStartedPulling="2026-03-20 13:37:18.806621268 +0000 UTC m=+885.096221211" lastFinishedPulling="2026-03-20 13:37:21.891961577 +0000 UTC m=+888.181561560" observedRunningTime="2026-03-20 13:37:22.448104991 +0000 UTC m=+888.737704954" 
watchObservedRunningTime="2026-03-20 13:37:22.459874643 +0000 UTC m=+888.749474586" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.478931 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-fbbgx" podStartSLOduration=1.12709472 podStartE2EDuration="4.478902109s" podCreationTimestamp="2026-03-20 13:37:18 +0000 UTC" firstStartedPulling="2026-03-20 13:37:18.540123087 +0000 UTC m=+884.829723030" lastFinishedPulling="2026-03-20 13:37:21.891930436 +0000 UTC m=+888.181530419" observedRunningTime="2026-03-20 13:37:22.472945771 +0000 UTC m=+888.762545724" watchObservedRunningTime="2026-03-20 13:37:22.478902109 +0000 UTC m=+888.768502062" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.721714 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.896591 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-utilities\") pod \"eca51b99-1947-44d0-a86c-d8f0302337ba\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.896731 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-catalog-content\") pod \"eca51b99-1947-44d0-a86c-d8f0302337ba\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.896772 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7d9lx\" (UniqueName: \"kubernetes.io/projected/eca51b99-1947-44d0-a86c-d8f0302337ba-kube-api-access-7d9lx\") pod \"eca51b99-1947-44d0-a86c-d8f0302337ba\" (UID: \"eca51b99-1947-44d0-a86c-d8f0302337ba\") " Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.898171 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-utilities" (OuterVolumeSpecName: "utilities") pod "eca51b99-1947-44d0-a86c-d8f0302337ba" (UID: "eca51b99-1947-44d0-a86c-d8f0302337ba"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.905141 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eca51b99-1947-44d0-a86c-d8f0302337ba-kube-api-access-7d9lx" (OuterVolumeSpecName: "kube-api-access-7d9lx") pod "eca51b99-1947-44d0-a86c-d8f0302337ba" (UID: "eca51b99-1947-44d0-a86c-d8f0302337ba"). InnerVolumeSpecName "kube-api-access-7d9lx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.998952 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7d9lx\" (UniqueName: \"kubernetes.io/projected/eca51b99-1947-44d0-a86c-d8f0302337ba-kube-api-access-7d9lx\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:22 crc kubenswrapper[4690]: I0320 13:37:22.999002 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.070352 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eca51b99-1947-44d0-a86c-d8f0302337ba" (UID: "eca51b99-1947-44d0-a86c-d8f0302337ba"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.099778 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eca51b99-1947-44d0-a86c-d8f0302337ba-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.422655 4690 generic.go:334] "Generic (PLEG): container finished" podID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerID="c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51" exitCode=0 Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.422731 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fz2qz" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.422773 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz2qz" event={"ID":"eca51b99-1947-44d0-a86c-d8f0302337ba","Type":"ContainerDied","Data":"c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51"} Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.422886 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fz2qz" event={"ID":"eca51b99-1947-44d0-a86c-d8f0302337ba","Type":"ContainerDied","Data":"997675ace485b9c59b3c76c2e7f78b4a65a9bfaf35eaada4df8504529ea9a98b"} Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.422929 4690 scope.go:117] "RemoveContainer" containerID="c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.445360 4690 scope.go:117] "RemoveContainer" containerID="7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.460641 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fz2qz"] Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.469211 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fz2qz"] Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.482159 4690 scope.go:117] "RemoveContainer" containerID="d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.498934 4690 scope.go:117] "RemoveContainer" containerID="c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51" Mar 20 13:37:23 crc kubenswrapper[4690]: E0320 13:37:23.499979 4690 log.go:32] "ContainerStatus 
from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51\": container with ID starting with c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51 not found: ID does not exist" containerID="c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.500010 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51"} err="failed to get container status \"c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51\": rpc error: code = NotFound desc = could not find container \"c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51\": container with ID starting with c9e842722e48cb3124117b6d14521bacfab9c72986715f438450a82cf5106c51 not found: ID does not exist" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.500029 4690 scope.go:117] "RemoveContainer" containerID="7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e" Mar 20 13:37:23 crc kubenswrapper[4690]: E0320 13:37:23.500471 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e\": container with ID starting with 7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e not found: ID does not exist" containerID="7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.500500 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e"} err="failed to get container status \"7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e\": rpc error: code = NotFound desc = could not find container \"7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e\": container with ID starting with 7fc36421f81434af400de84bc2d1e11127bb4b1c71e87b0629ffc7d482fcdf8e not found: ID does not exist" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.500517 4690 scope.go:117] "RemoveContainer" containerID="d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237" Mar 20 13:37:23 crc kubenswrapper[4690]: E0320 13:37:23.502510 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237\": container with ID starting with d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237 not found: ID does not exist" containerID="d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237" Mar 20 13:37:23 crc kubenswrapper[4690]: I0320 13:37:23.502550 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237"} err="failed to get container status \"d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237\": rpc error: code = NotFound desc = could not find container \"d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237\": container with ID starting with d3b68f147d6ea0569df539c9eeae74e666b8040666e8d4902dd23778d39bd237 not found: ID does not exist" Mar 20 13:37:24 crc kubenswrapper[4690]: I0320 13:37:24.422278 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" path="/var/lib/kubelet/pods/eca51b99-1947-44d0-a86c-d8f0302337ba/volumes" Mar 20 13:37:25 crc kubenswrapper[4690]: I0320 13:37:25.444158 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" event={"ID":"7b110386-4970-4b99-ab60-783130776002","Type":"ContainerStarted","Data":"4a7def96924d50b4e289b448e4fb864e0f6c2e0bba9cfd670ecf979cec07c757"} Mar 20 13:37:25 crc kubenswrapper[4690]: I0320 13:37:25.473470 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-9b8c8685d-njk8k" podStartSLOduration=1.824254767 podStartE2EDuration="7.47343739s" podCreationTimestamp="2026-03-20 13:37:18 +0000 UTC" firstStartedPulling="2026-03-20 13:37:18.913763217 +0000 UTC m=+885.203363170" lastFinishedPulling="2026-03-20 13:37:24.56294584 +0000 UTC m=+890.852545793" observedRunningTime="2026-03-20 13:37:25.470829326 +0000 UTC m=+891.760429299" watchObservedRunningTime="2026-03-20 13:37:25.47343739 +0000 UTC m=+891.763037373" Mar 20 13:37:28 crc kubenswrapper[4690]: I0320 13:37:28.588417 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-fbbgx" Mar 20 13:37:28 crc kubenswrapper[4690]: I0320 13:37:28.871020 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:28 crc kubenswrapper[4690]: I0320 13:37:28.871117 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:28 crc kubenswrapper[4690]: I0320 13:37:28.878323 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:29 crc kubenswrapper[4690]: I0320 13:37:29.571831 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-6fd47b444f-wv59m" Mar 20 13:37:29 crc kubenswrapper[4690]: I0320 13:37:29.648026 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-h2jxx"] Mar 20 13:37:33 crc kubenswrapper[4690]: I0320 13:37:33.830196 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:37:33 crc kubenswrapper[4690]: I0320 13:37:33.830614 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:37:39 crc kubenswrapper[4690]: I0320 13:37:39.134926 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f558f5558-gss2p" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.255657 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-q7pkr"] Mar 20 13:37:43 crc kubenswrapper[4690]: E0320 13:37:43.256335 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="registry-server" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 
13:37:43.256357 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="registry-server" Mar 20 13:37:43 crc kubenswrapper[4690]: E0320 13:37:43.256395 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="extract-utilities" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.256408 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="extract-utilities" Mar 20 13:37:43 crc kubenswrapper[4690]: E0320 13:37:43.256426 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="extract-content" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.256439 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="extract-content" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.256640 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="eca51b99-1947-44d0-a86c-d8f0302337ba" containerName="registry-server" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.258629 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.265289 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q7pkr"] Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.375755 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-catalog-content\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.375824 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpj7c\" (UniqueName: \"kubernetes.io/projected/9ddfdc74-04e3-4441-b0fb-7a3fac192438-kube-api-access-zpj7c\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.375908 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-utilities\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.477446 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-catalog-content\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.477525 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpj7c\" (UniqueName: \"kubernetes.io/projected/9ddfdc74-04e3-4441-b0fb-7a3fac192438-kube-api-access-zpj7c\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " 
pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.477559 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-utilities\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.477978 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-catalog-content\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.478081 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-utilities\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.500905 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpj7c\" (UniqueName: \"kubernetes.io/projected/9ddfdc74-04e3-4441-b0fb-7a3fac192438-kube-api-access-zpj7c\") pod \"community-operators-q7pkr\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.593297 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:43 crc kubenswrapper[4690]: I0320 13:37:43.872577 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-q7pkr"] Mar 20 13:37:44 crc kubenswrapper[4690]: I0320 13:37:44.678163 4690 generic.go:334] "Generic (PLEG): container finished" podID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerID="17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716" exitCode=0 Mar 20 13:37:44 crc kubenswrapper[4690]: I0320 13:37:44.678231 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7pkr" event={"ID":"9ddfdc74-04e3-4441-b0fb-7a3fac192438","Type":"ContainerDied","Data":"17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716"} Mar 20 13:37:44 crc kubenswrapper[4690]: I0320 13:37:44.678306 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7pkr" event={"ID":"9ddfdc74-04e3-4441-b0fb-7a3fac192438","Type":"ContainerStarted","Data":"f6889707139be5a4937f04582c95f8aece57a68bda6c509dafb21b2cf36515d5"} Mar 20 13:37:45 crc kubenswrapper[4690]: I0320 13:37:45.687693 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7pkr" event={"ID":"9ddfdc74-04e3-4441-b0fb-7a3fac192438","Type":"ContainerStarted","Data":"573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82"} Mar 20 13:37:46 crc kubenswrapper[4690]: I0320 13:37:46.697645 4690 generic.go:334] "Generic (PLEG): container finished" podID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerID="573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82" exitCode=0 Mar 20 13:37:46 crc kubenswrapper[4690]: I0320 13:37:46.697731 4690 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/community-operators-q7pkr" event={"ID":"9ddfdc74-04e3-4441-b0fb-7a3fac192438","Type":"ContainerDied","Data":"573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82"} Mar 20 13:37:47 crc kubenswrapper[4690]: I0320 13:37:47.709268 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7pkr" event={"ID":"9ddfdc74-04e3-4441-b0fb-7a3fac192438","Type":"ContainerStarted","Data":"3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2"} Mar 20 13:37:47 crc kubenswrapper[4690]: I0320 13:37:47.740241 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-q7pkr" podStartSLOduration=2.287318661 podStartE2EDuration="4.740223607s" podCreationTimestamp="2026-03-20 13:37:43 +0000 UTC" firstStartedPulling="2026-03-20 13:37:44.679824731 +0000 UTC m=+910.969424674" lastFinishedPulling="2026-03-20 13:37:47.132729657 +0000 UTC m=+913.422329620" observedRunningTime="2026-03-20 13:37:47.734244309 +0000 UTC m=+914.023844272" watchObservedRunningTime="2026-03-20 13:37:47.740223607 +0000 UTC m=+914.029823550" Mar 20 13:37:53 crc kubenswrapper[4690]: I0320 13:37:53.594534 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:53 crc kubenswrapper[4690]: I0320 13:37:53.596591 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:53 crc kubenswrapper[4690]: I0320 13:37:53.652249 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:53 crc kubenswrapper[4690]: I0320 13:37:53.796755 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:53 crc kubenswrapper[4690]: I0320 13:37:53.901043 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q7pkr"] Mar 20 13:37:54 crc kubenswrapper[4690]: I0320 13:37:54.693616 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-h2jxx" podUID="74952b15-473b-462f-a05f-6c00433ed4d5" containerName="console" containerID="cri-o://f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2" gracePeriod=15 Mar 20 13:37:54 crc kubenswrapper[4690]: I0320 13:37:54.780463 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm"] Mar 20 13:37:54 crc kubenswrapper[4690]: I0320 13:37:54.782517 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:54 crc kubenswrapper[4690]: I0320 13:37:54.785259 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Mar 20 13:37:54 crc kubenswrapper[4690]: I0320 13:37:54.796002 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm"] Mar 20 13:37:54 crc kubenswrapper[4690]: I0320 13:37:54.947355 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8szr8\" (UniqueName: \"kubernetes.io/projected/46209210-259c-4c0a-96e7-596a1f975b2d-kube-api-access-8szr8\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:54 crc kubenswrapper[4690]: I0320 13:37:54.947783 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-util\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:54 crc kubenswrapper[4690]: I0320 13:37:54.947820 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-bundle\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.048710 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8szr8\" (UniqueName: \"kubernetes.io/projected/46209210-259c-4c0a-96e7-596a1f975b2d-kube-api-access-8szr8\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.048760 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-util\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.048786 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-bundle\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.049257 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-bundle\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.049351 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-util\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.067585 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8szr8\" (UniqueName: \"kubernetes.io/projected/46209210-259c-4c0a-96e7-596a1f975b2d-kube-api-access-8szr8\") pod \"2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.107494 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-h2jxx_74952b15-473b-462f-a05f-6c00433ed4d5/console/0.log" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.107558 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.161027 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.250305 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-trusted-ca-bundle\") pod \"74952b15-473b-462f-a05f-6c00433ed4d5\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.250562 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-console-config\") pod \"74952b15-473b-462f-a05f-6c00433ed4d5\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.250631 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-serving-cert\") pod \"74952b15-473b-462f-a05f-6c00433ed4d5\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.250691 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-oauth-serving-cert\") pod \"74952b15-473b-462f-a05f-6c00433ed4d5\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.250765 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-oauth-config\") pod \"74952b15-473b-462f-a05f-6c00433ed4d5\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.250816 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mp887\" (UniqueName: \"kubernetes.io/projected/74952b15-473b-462f-a05f-6c00433ed4d5-kube-api-access-mp887\") pod \"74952b15-473b-462f-a05f-6c00433ed4d5\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.250871 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-service-ca\") pod \"74952b15-473b-462f-a05f-6c00433ed4d5\" (UID: \"74952b15-473b-462f-a05f-6c00433ed4d5\") " Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.251900 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-service-ca" (OuterVolumeSpecName: "service-ca") pod "74952b15-473b-462f-a05f-6c00433ed4d5" (UID: "74952b15-473b-462f-a05f-6c00433ed4d5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.252326 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "74952b15-473b-462f-a05f-6c00433ed4d5" (UID: "74952b15-473b-462f-a05f-6c00433ed4d5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.253618 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "74952b15-473b-462f-a05f-6c00433ed4d5" (UID: "74952b15-473b-462f-a05f-6c00433ed4d5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.253880 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-console-config" (OuterVolumeSpecName: "console-config") pod "74952b15-473b-462f-a05f-6c00433ed4d5" (UID: "74952b15-473b-462f-a05f-6c00433ed4d5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.256688 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "74952b15-473b-462f-a05f-6c00433ed4d5" (UID: "74952b15-473b-462f-a05f-6c00433ed4d5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.258533 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "74952b15-473b-462f-a05f-6c00433ed4d5" (UID: "74952b15-473b-462f-a05f-6c00433ed4d5"). InnerVolumeSpecName "console-oauth-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.258768 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74952b15-473b-462f-a05f-6c00433ed4d5-kube-api-access-mp887" (OuterVolumeSpecName: "kube-api-access-mp887") pod "74952b15-473b-462f-a05f-6c00433ed4d5" (UID: "74952b15-473b-462f-a05f-6c00433ed4d5"). InnerVolumeSpecName "kube-api-access-mp887". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.351806 4690 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.351859 4690 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.351874 4690 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/74952b15-473b-462f-a05f-6c00433ed4d5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.351883 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mp887\" (UniqueName: \"kubernetes.io/projected/74952b15-473b-462f-a05f-6c00433ed4d5-kube-api-access-mp887\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.351894 4690 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-service-ca\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.351901 4690 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.351909 4690 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/74952b15-473b-462f-a05f-6c00433ed4d5-console-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.365768 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm"] Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.768362 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-h2jxx_74952b15-473b-462f-a05f-6c00433ed4d5/console/0.log" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.768453 4690 generic.go:334] "Generic (PLEG): container finished" podID="74952b15-473b-462f-a05f-6c00433ed4d5" containerID="f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2" exitCode=2 Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.768541 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-h2jxx" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.768568 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h2jxx" event={"ID":"74952b15-473b-462f-a05f-6c00433ed4d5","Type":"ContainerDied","Data":"f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2"} Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.768610 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-h2jxx" event={"ID":"74952b15-473b-462f-a05f-6c00433ed4d5","Type":"ContainerDied","Data":"6c815e07f71eaef0334600e27b15dcb578b4717bf3326d08155cd22c1ad3b139"} Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.768640 4690 scope.go:117] "RemoveContainer" containerID="f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.772050 4690 generic.go:334] "Generic (PLEG): container finished" podID="46209210-259c-4c0a-96e7-596a1f975b2d" containerID="6a795c502133c847c4d93e33ea696a92b3a568967267660a6dac8df975d674d2" exitCode=0 Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.772329 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-q7pkr" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerName="registry-server" containerID="cri-o://3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2" gracePeriod=2 Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.773416 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" event={"ID":"46209210-259c-4c0a-96e7-596a1f975b2d","Type":"ContainerDied","Data":"6a795c502133c847c4d93e33ea696a92b3a568967267660a6dac8df975d674d2"} Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.773463 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" event={"ID":"46209210-259c-4c0a-96e7-596a1f975b2d","Type":"ContainerStarted","Data":"b5a20114151589867977b6024884aac4007a9391b5ef915b5c5f2dd999fa78a8"} Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.799819 4690 scope.go:117] "RemoveContainer" containerID="f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2" Mar 20 13:37:55 crc kubenswrapper[4690]: E0320 13:37:55.804159 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2\": container with ID starting with f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2 not found: ID does not exist" containerID="f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.804225 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2"} err="failed to get container status \"f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2\": rpc error: code = NotFound desc = could not find container \"f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2\": container with ID starting with f9ee9425898d2d824e502a78c99bedc312c80fd0516c353cf71ad9dc57a600a2 not found: ID does not exist" Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.824383 4690 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-h2jxx"] Mar 20 13:37:55 crc kubenswrapper[4690]: I0320 13:37:55.828984 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-h2jxx"] Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.151471 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.264693 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-utilities\") pod \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.264790 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-catalog-content\") pod \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.264884 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zpj7c\" (UniqueName: \"kubernetes.io/projected/9ddfdc74-04e3-4441-b0fb-7a3fac192438-kube-api-access-zpj7c\") pod \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\" (UID: \"9ddfdc74-04e3-4441-b0fb-7a3fac192438\") " Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.265583 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-utilities" (OuterVolumeSpecName: "utilities") pod "9ddfdc74-04e3-4441-b0fb-7a3fac192438" (UID: "9ddfdc74-04e3-4441-b0fb-7a3fac192438"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.268100 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ddfdc74-04e3-4441-b0fb-7a3fac192438-kube-api-access-zpj7c" (OuterVolumeSpecName: "kube-api-access-zpj7c") pod "9ddfdc74-04e3-4441-b0fb-7a3fac192438" (UID: "9ddfdc74-04e3-4441-b0fb-7a3fac192438"). InnerVolumeSpecName "kube-api-access-zpj7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.368149 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zpj7c\" (UniqueName: \"kubernetes.io/projected/9ddfdc74-04e3-4441-b0fb-7a3fac192438-kube-api-access-zpj7c\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.368212 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.378298 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9ddfdc74-04e3-4441-b0fb-7a3fac192438" (UID: "9ddfdc74-04e3-4441-b0fb-7a3fac192438"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.430139 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74952b15-473b-462f-a05f-6c00433ed4d5" path="/var/lib/kubelet/pods/74952b15-473b-462f-a05f-6c00433ed4d5/volumes" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.469552 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9ddfdc74-04e3-4441-b0fb-7a3fac192438-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.782952 4690 generic.go:334] "Generic (PLEG): container finished" podID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerID="3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2" exitCode=0 Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.783020 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7pkr" event={"ID":"9ddfdc74-04e3-4441-b0fb-7a3fac192438","Type":"ContainerDied","Data":"3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2"} Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.783049 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-q7pkr" event={"ID":"9ddfdc74-04e3-4441-b0fb-7a3fac192438","Type":"ContainerDied","Data":"f6889707139be5a4937f04582c95f8aece57a68bda6c509dafb21b2cf36515d5"} Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.783049 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-q7pkr" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.783069 4690 scope.go:117] "RemoveContainer" containerID="3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.804151 4690 scope.go:117] "RemoveContainer" containerID="573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.810633 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-q7pkr"] Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.814076 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-q7pkr"] Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.820828 4690 scope.go:117] "RemoveContainer" containerID="17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.857254 4690 scope.go:117] "RemoveContainer" containerID="3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2" Mar 20 13:37:56 crc kubenswrapper[4690]: E0320 13:37:56.858107 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2\": container with ID starting with 3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2 not found: ID does not exist" containerID="3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.858240 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2"} err="failed to get container status \"3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2\": rpc error: code = 
NotFound desc = could not find container \"3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2\": container with ID starting with 3dd90ec9d62b6e213d64bf217952e29d835a37d55018b4fc304c9b603697e0b2 not found: ID does not exist" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.858366 4690 scope.go:117] "RemoveContainer" containerID="573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82" Mar 20 13:37:56 crc kubenswrapper[4690]: E0320 13:37:56.859004 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82\": container with ID starting with 573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82 not found: ID does not exist" containerID="573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.859037 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82"} err="failed to get container status \"573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82\": rpc error: code = NotFound desc = could not find container \"573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82\": container with ID starting with 573ff725833f305eb61c75d72396a78ae003cf955632ebdf0b6aed4d7a372b82 not found: ID does not exist" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.859058 4690 scope.go:117] "RemoveContainer" containerID="17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716" Mar 20 13:37:56 crc kubenswrapper[4690]: E0320 13:37:56.859381 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716\": container with ID starting with 17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716 not found: ID does not exist" containerID="17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716" Mar 20 13:37:56 crc kubenswrapper[4690]: I0320 13:37:56.859524 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716"} err="failed to get container status \"17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716\": rpc error: code = NotFound desc = could not find container \"17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716\": container with ID starting with 17fc247a2488f7b25e19e24f00e7e5f4603c22580b55dcc24d18b2ed99fc7716 not found: ID does not exist" Mar 20 13:37:57 crc kubenswrapper[4690]: I0320 13:37:57.796206 4690 generic.go:334] "Generic (PLEG): container finished" podID="46209210-259c-4c0a-96e7-596a1f975b2d" containerID="61b2f2a89f8a2891dfcec9aaa5ac7e953ef424812f4fc901c79a05636cbd7a5e" exitCode=0 Mar 20 13:37:57 crc kubenswrapper[4690]: I0320 13:37:57.796312 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" event={"ID":"46209210-259c-4c0a-96e7-596a1f975b2d","Type":"ContainerDied","Data":"61b2f2a89f8a2891dfcec9aaa5ac7e953ef424812f4fc901c79a05636cbd7a5e"} Mar 20 13:37:58 crc kubenswrapper[4690]: I0320 13:37:58.429016 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" 
path="/var/lib/kubelet/pods/9ddfdc74-04e3-4441-b0fb-7a3fac192438/volumes" Mar 20 13:37:58 crc kubenswrapper[4690]: I0320 13:37:58.809304 4690 generic.go:334] "Generic (PLEG): container finished" podID="46209210-259c-4c0a-96e7-596a1f975b2d" containerID="5ce29483dc03681f0275087613eabbd6fa5e2669efc0ab873776333d388ca1a1" exitCode=0 Mar 20 13:37:58 crc kubenswrapper[4690]: I0320 13:37:58.809405 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" event={"ID":"46209210-259c-4c0a-96e7-596a1f975b2d","Type":"ContainerDied","Data":"5ce29483dc03681f0275087613eabbd6fa5e2669efc0ab873776333d388ca1a1"} Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.117488 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156072 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566898-244jx"] Mar 20 13:38:00 crc kubenswrapper[4690]: E0320 13:38:00.156323 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerName="registry-server" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156342 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerName="registry-server" Mar 20 13:38:00 crc kubenswrapper[4690]: E0320 13:38:00.156360 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerName="extract-utilities" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156370 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerName="extract-utilities" Mar 20 13:38:00 crc kubenswrapper[4690]: E0320 13:38:00.156389 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46209210-259c-4c0a-96e7-596a1f975b2d" containerName="util" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156397 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="46209210-259c-4c0a-96e7-596a1f975b2d" containerName="util" Mar 20 13:38:00 crc kubenswrapper[4690]: E0320 13:38:00.156407 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74952b15-473b-462f-a05f-6c00433ed4d5" containerName="console" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156415 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="74952b15-473b-462f-a05f-6c00433ed4d5" containerName="console" Mar 20 13:38:00 crc kubenswrapper[4690]: E0320 13:38:00.156425 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerName="extract-content" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156432 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerName="extract-content" Mar 20 13:38:00 crc kubenswrapper[4690]: E0320 13:38:00.156447 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46209210-259c-4c0a-96e7-596a1f975b2d" containerName="extract" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156454 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="46209210-259c-4c0a-96e7-596a1f975b2d" containerName="extract" Mar 20 13:38:00 crc kubenswrapper[4690]: E0320 13:38:00.156467 4690 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="46209210-259c-4c0a-96e7-596a1f975b2d" containerName="pull" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156474 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="46209210-259c-4c0a-96e7-596a1f975b2d" containerName="pull" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156585 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="46209210-259c-4c0a-96e7-596a1f975b2d" containerName="extract" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156603 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ddfdc74-04e3-4441-b0fb-7a3fac192438" containerName="registry-server" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.156616 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="74952b15-473b-462f-a05f-6c00433ed4d5" containerName="console" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.157105 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566898-244jx" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.159348 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.159948 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.160778 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.181405 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566898-244jx"] Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.219515 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-bundle\") pod \"46209210-259c-4c0a-96e7-596a1f975b2d\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.219598 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-util\") pod \"46209210-259c-4c0a-96e7-596a1f975b2d\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.219689 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8szr8\" (UniqueName: \"kubernetes.io/projected/46209210-259c-4c0a-96e7-596a1f975b2d-kube-api-access-8szr8\") pod \"46209210-259c-4c0a-96e7-596a1f975b2d\" (UID: \"46209210-259c-4c0a-96e7-596a1f975b2d\") " Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.220630 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-bundle" (OuterVolumeSpecName: "bundle") pod "46209210-259c-4c0a-96e7-596a1f975b2d" (UID: "46209210-259c-4c0a-96e7-596a1f975b2d"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.225157 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46209210-259c-4c0a-96e7-596a1f975b2d-kube-api-access-8szr8" (OuterVolumeSpecName: "kube-api-access-8szr8") pod "46209210-259c-4c0a-96e7-596a1f975b2d" (UID: "46209210-259c-4c0a-96e7-596a1f975b2d"). InnerVolumeSpecName "kube-api-access-8szr8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.240167 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-util" (OuterVolumeSpecName: "util") pod "46209210-259c-4c0a-96e7-596a1f975b2d" (UID: "46209210-259c-4c0a-96e7-596a1f975b2d"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.321880 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lvrzd\" (UniqueName: \"kubernetes.io/projected/5702a850-32be-49da-bd61-58a9f6088792-kube-api-access-lvrzd\") pod \"auto-csr-approver-29566898-244jx\" (UID: \"5702a850-32be-49da-bd61-58a9f6088792\") " pod="openshift-infra/auto-csr-approver-29566898-244jx" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.322198 4690 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.322229 4690 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/46209210-259c-4c0a-96e7-596a1f975b2d-util\") on node \"crc\" DevicePath \"\"" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.322243 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8szr8\" (UniqueName: \"kubernetes.io/projected/46209210-259c-4c0a-96e7-596a1f975b2d-kube-api-access-8szr8\") on node \"crc\" DevicePath \"\"" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.423964 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lvrzd\" (UniqueName: \"kubernetes.io/projected/5702a850-32be-49da-bd61-58a9f6088792-kube-api-access-lvrzd\") pod \"auto-csr-approver-29566898-244jx\" (UID: \"5702a850-32be-49da-bd61-58a9f6088792\") " pod="openshift-infra/auto-csr-approver-29566898-244jx" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.447137 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lvrzd\" (UniqueName: \"kubernetes.io/projected/5702a850-32be-49da-bd61-58a9f6088792-kube-api-access-lvrzd\") pod \"auto-csr-approver-29566898-244jx\" (UID: \"5702a850-32be-49da-bd61-58a9f6088792\") " pod="openshift-infra/auto-csr-approver-29566898-244jx" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.475828 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566898-244jx" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.724993 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566898-244jx"] Mar 20 13:38:00 crc kubenswrapper[4690]: W0320 13:38:00.729244 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5702a850_32be_49da_bd61_58a9f6088792.slice/crio-4eb661b64c28a35a9611cd03049e6e8208a03cf10111f61b4fbc9e8a5a0ed14b WatchSource:0}: Error finding container 4eb661b64c28a35a9611cd03049e6e8208a03cf10111f61b4fbc9e8a5a0ed14b: Status 404 returned error can't find the container with id 4eb661b64c28a35a9611cd03049e6e8208a03cf10111f61b4fbc9e8a5a0ed14b Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.834580 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" event={"ID":"46209210-259c-4c0a-96e7-596a1f975b2d","Type":"ContainerDied","Data":"b5a20114151589867977b6024884aac4007a9391b5ef915b5c5f2dd999fa78a8"} Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.834633 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.834643 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5a20114151589867977b6024884aac4007a9391b5ef915b5c5f2dd999fa78a8" Mar 20 13:38:00 crc kubenswrapper[4690]: I0320 13:38:00.835629 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566898-244jx" event={"ID":"5702a850-32be-49da-bd61-58a9f6088792","Type":"ContainerStarted","Data":"4eb661b64c28a35a9611cd03049e6e8208a03cf10111f61b4fbc9e8a5a0ed14b"} Mar 20 13:38:02 crc kubenswrapper[4690]: I0320 13:38:02.856800 4690 generic.go:334] "Generic (PLEG): container finished" podID="5702a850-32be-49da-bd61-58a9f6088792" containerID="4d9b99253e553f43b512a97cc9c617f83e42f1a1449e0528efd23f006f78f359" exitCode=0 Mar 20 13:38:02 crc kubenswrapper[4690]: I0320 13:38:02.856891 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566898-244jx" event={"ID":"5702a850-32be-49da-bd61-58a9f6088792","Type":"ContainerDied","Data":"4d9b99253e553f43b512a97cc9c617f83e42f1a1449e0528efd23f006f78f359"} Mar 20 13:38:03 crc kubenswrapper[4690]: I0320 13:38:03.829490 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:38:03 crc kubenswrapper[4690]: I0320 13:38:03.829549 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:38:03 crc kubenswrapper[4690]: I0320 13:38:03.829587 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:38:03 crc kubenswrapper[4690]: I0320 13:38:03.830113 4690 
kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a92e622601d5d0cbc8d5ec8266b1e5ffd0ed3023dc04d14e7b0e5bdc6a68783b"} pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 13:38:03 crc kubenswrapper[4690]: I0320 13:38:03.830159 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" containerID="cri-o://a92e622601d5d0cbc8d5ec8266b1e5ffd0ed3023dc04d14e7b0e5bdc6a68783b" gracePeriod=600 Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.094704 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566898-244jx" Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.279197 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lvrzd\" (UniqueName: \"kubernetes.io/projected/5702a850-32be-49da-bd61-58a9f6088792-kube-api-access-lvrzd\") pod \"5702a850-32be-49da-bd61-58a9f6088792\" (UID: \"5702a850-32be-49da-bd61-58a9f6088792\") " Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.284395 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5702a850-32be-49da-bd61-58a9f6088792-kube-api-access-lvrzd" (OuterVolumeSpecName: "kube-api-access-lvrzd") pod "5702a850-32be-49da-bd61-58a9f6088792" (UID: "5702a850-32be-49da-bd61-58a9f6088792"). InnerVolumeSpecName "kube-api-access-lvrzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.382002 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lvrzd\" (UniqueName: \"kubernetes.io/projected/5702a850-32be-49da-bd61-58a9f6088792-kube-api-access-lvrzd\") on node \"crc\" DevicePath \"\"" Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.869274 4690 generic.go:334] "Generic (PLEG): container finished" podID="60ded650-b298-4115-8286-8969b94d4062" containerID="a92e622601d5d0cbc8d5ec8266b1e5ffd0ed3023dc04d14e7b0e5bdc6a68783b" exitCode=0 Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.869353 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerDied","Data":"a92e622601d5d0cbc8d5ec8266b1e5ffd0ed3023dc04d14e7b0e5bdc6a68783b"} Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.869699 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"1cbfcd3ff515926ff968741597036e24520ad18c5c3213b253b31335bc1c23e8"} Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.869725 4690 scope.go:117] "RemoveContainer" containerID="967fda2103017b22f1f6e626bb333638e17f0d3e154429ee6d859d0e073cb2cc" Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.872212 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566898-244jx" event={"ID":"5702a850-32be-49da-bd61-58a9f6088792","Type":"ContainerDied","Data":"4eb661b64c28a35a9611cd03049e6e8208a03cf10111f61b4fbc9e8a5a0ed14b"} Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 
13:38:04.872240 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4eb661b64c28a35a9611cd03049e6e8208a03cf10111f61b4fbc9e8a5a0ed14b" Mar 20 13:38:04 crc kubenswrapper[4690]: I0320 13:38:04.872283 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566898-244jx" Mar 20 13:38:05 crc kubenswrapper[4690]: I0320 13:38:05.150807 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566892-mhg5j"] Mar 20 13:38:05 crc kubenswrapper[4690]: I0320 13:38:05.155734 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566892-mhg5j"] Mar 20 13:38:06 crc kubenswrapper[4690]: I0320 13:38:06.420633 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7806c869-764a-4b3e-b440-e7b7a5db5581" path="/var/lib/kubelet/pods/7806c869-764a-4b3e-b440-e7b7a5db5581/volumes" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.651377 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj"] Mar 20 13:38:09 crc kubenswrapper[4690]: E0320 13:38:09.652142 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5702a850-32be-49da-bd61-58a9f6088792" containerName="oc" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.652154 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5702a850-32be-49da-bd61-58a9f6088792" containerName="oc" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.652273 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5702a850-32be-49da-bd61-58a9f6088792" containerName="oc" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.652681 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.655127 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.655328 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.655663 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.655807 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.656510 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-cbh9d" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.678102 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj"] Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.748380 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-webhook-cert\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.748422 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-apiservice-cert\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.748446 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmfhj\" (UniqueName: \"kubernetes.io/projected/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-kube-api-access-pmfhj\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.849882 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-webhook-cert\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.849939 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-apiservice-cert\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.849977 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmfhj\" (UniqueName: \"kubernetes.io/projected/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-kube-api-access-pmfhj\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.855757 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-apiservice-cert\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.856083 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-webhook-cert\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.883300 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmfhj\" (UniqueName: \"kubernetes.io/projected/8ca65291-10fb-4dfa-9e7f-9b505e2fe542-kube-api-access-pmfhj\") pod \"metallb-operator-controller-manager-84576899bd-5q5sj\" (UID: \"8ca65291-10fb-4dfa-9e7f-9b505e2fe542\") " pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.913941 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m"] Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.914618 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.917329 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.917328 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-jdwbb" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.917520 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.928862 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m"] Mar 20 13:38:09 crc kubenswrapper[4690]: I0320 13:38:09.968656 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.052879 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0d6a728-27b8-420e-ab5b-ebee42426fad-webhook-cert\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.053170 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0d6a728-27b8-420e-ab5b-ebee42426fad-apiservice-cert\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.053204 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdwkc\" (UniqueName: \"kubernetes.io/projected/c0d6a728-27b8-420e-ab5b-ebee42426fad-kube-api-access-vdwkc\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.154133 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0d6a728-27b8-420e-ab5b-ebee42426fad-webhook-cert\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.154209 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0d6a728-27b8-420e-ab5b-ebee42426fad-apiservice-cert\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.154232 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdwkc\" (UniqueName: \"kubernetes.io/projected/c0d6a728-27b8-420e-ab5b-ebee42426fad-kube-api-access-vdwkc\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.160619 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c0d6a728-27b8-420e-ab5b-ebee42426fad-webhook-cert\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.160736 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c0d6a728-27b8-420e-ab5b-ebee42426fad-apiservice-cert\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " 
pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.171761 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdwkc\" (UniqueName: \"kubernetes.io/projected/c0d6a728-27b8-420e-ab5b-ebee42426fad-kube-api-access-vdwkc\") pod \"metallb-operator-webhook-server-7dcf6c965f-blr7m\" (UID: \"c0d6a728-27b8-420e-ab5b-ebee42426fad\") " pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.235135 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.391345 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj"] Mar 20 13:38:10 crc kubenswrapper[4690]: W0320 13:38:10.400304 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ca65291_10fb_4dfa_9e7f_9b505e2fe542.slice/crio-94833ab0379471af54ac439d8d4d5fc4adee73bb84ea3dc23f50096ed673eab2 WatchSource:0}: Error finding container 94833ab0379471af54ac439d8d4d5fc4adee73bb84ea3dc23f50096ed673eab2: Status 404 returned error can't find the container with id 94833ab0379471af54ac439d8d4d5fc4adee73bb84ea3dc23f50096ed673eab2 Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.445589 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m"] Mar 20 13:38:10 crc kubenswrapper[4690]: W0320 13:38:10.453032 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0d6a728_27b8_420e_ab5b_ebee42426fad.slice/crio-91dcf9bff2ef7a8bff19d10a6229d813517fe770f455b517dcfc40f44bf5dd11 WatchSource:0}: Error finding container 91dcf9bff2ef7a8bff19d10a6229d813517fe770f455b517dcfc40f44bf5dd11: Status 404 returned error can't find the container with id 91dcf9bff2ef7a8bff19d10a6229d813517fe770f455b517dcfc40f44bf5dd11 Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.913400 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" event={"ID":"c0d6a728-27b8-420e-ab5b-ebee42426fad","Type":"ContainerStarted","Data":"91dcf9bff2ef7a8bff19d10a6229d813517fe770f455b517dcfc40f44bf5dd11"} Mar 20 13:38:10 crc kubenswrapper[4690]: I0320 13:38:10.914871 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" event={"ID":"8ca65291-10fb-4dfa-9e7f-9b505e2fe542","Type":"ContainerStarted","Data":"94833ab0379471af54ac439d8d4d5fc4adee73bb84ea3dc23f50096ed673eab2"} Mar 20 13:38:15 crc kubenswrapper[4690]: I0320 13:38:15.955550 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" event={"ID":"8ca65291-10fb-4dfa-9e7f-9b505e2fe542","Type":"ContainerStarted","Data":"aec2d1f0128ceff0a0312164ff82b80c83d87a1075dceb67ae417969049d76ca"} Mar 20 13:38:15 crc kubenswrapper[4690]: I0320 13:38:15.958019 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:15 crc kubenswrapper[4690]: I0320 13:38:15.960028 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" event={"ID":"c0d6a728-27b8-420e-ab5b-ebee42426fad","Type":"ContainerStarted","Data":"e7a89d62f6a81e16a99054f846da63a676744d0dc967d2e0fd57a77bc3538d02"} Mar 20 13:38:15 crc kubenswrapper[4690]: I0320 13:38:15.960300 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:15 crc kubenswrapper[4690]: I0320 13:38:15.981586 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" podStartSLOduration=1.838540716 podStartE2EDuration="6.981562665s" podCreationTimestamp="2026-03-20 13:38:09 +0000 UTC" firstStartedPulling="2026-03-20 13:38:10.408653421 +0000 UTC m=+936.698253374" lastFinishedPulling="2026-03-20 13:38:15.55167538 +0000 UTC m=+941.841275323" observedRunningTime="2026-03-20 13:38:15.980961518 +0000 UTC m=+942.270561521" watchObservedRunningTime="2026-03-20 13:38:15.981562665 +0000 UTC m=+942.271162618" Mar 20 13:38:16 crc kubenswrapper[4690]: I0320 13:38:16.006425 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" podStartSLOduration=1.8848842220000002 podStartE2EDuration="7.006408045s" podCreationTimestamp="2026-03-20 13:38:09 +0000 UTC" firstStartedPulling="2026-03-20 13:38:10.455985165 +0000 UTC m=+936.745585108" lastFinishedPulling="2026-03-20 13:38:15.577508948 +0000 UTC m=+941.867108931" observedRunningTime="2026-03-20 13:38:16.005788577 +0000 UTC m=+942.295388520" watchObservedRunningTime="2026-03-20 13:38:16.006408045 +0000 UTC m=+942.296007978" Mar 20 13:38:30 crc kubenswrapper[4690]: I0320 13:38:30.241699 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7dcf6c965f-blr7m" Mar 20 13:38:42 crc kubenswrapper[4690]: I0320 13:38:42.960161 4690 scope.go:117] "RemoveContainer" containerID="5443a303b38eaf1240b93dae1a7ae7db9e3107f1eed33c36f6b4073cc2e8f5dd" Mar 20 13:38:49 crc kubenswrapper[4690]: I0320 13:38:49.971041 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-84576899bd-5q5sj" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.728115 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-dp5pp"] Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.730301 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.731883 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.732745 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-tr7rv" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.732971 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.741958 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg"] Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.742981 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.746885 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.754268 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg"] Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.799704 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-7kmfq"] Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.800587 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-7kmfq" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.803376 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.803417 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-549jr" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.803504 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.803505 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.820612 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/80f8737c-71bf-4a64-964c-b902b649115c-frr-startup\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.820666 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xkb6m\" (UniqueName: \"kubernetes.io/projected/b179134f-d752-4779-bd5e-5cb469d25ac1-kube-api-access-xkb6m\") pod \"frr-k8s-webhook-server-bcc4b6f68-95bvg\" (UID: \"b179134f-d752-4779-bd5e-5cb469d25ac1\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.820773 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-metrics\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.820921 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glrhb\" (UniqueName: \"kubernetes.io/projected/80f8737c-71bf-4a64-964c-b902b649115c-kube-api-access-glrhb\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.820967 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80f8737c-71bf-4a64-964c-b902b649115c-metrics-certs\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.821016 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-frr-sockets\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.821141 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-frr-conf\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.821183 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-reloader\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.821252 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b179134f-d752-4779-bd5e-5cb469d25ac1-cert\") pod \"frr-k8s-webhook-server-bcc4b6f68-95bvg\" (UID: \"b179134f-d752-4779-bd5e-5cb469d25ac1\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.837079 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-7bb4cc7c98-m6wll"] Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.838131 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.846512 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-7bb4cc7c98-m6wll"] Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.849555 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922656 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-frr-conf\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922698 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-reloader\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922724 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b179134f-d752-4779-bd5e-5cb469d25ac1-cert\") pod \"frr-k8s-webhook-server-bcc4b6f68-95bvg\" (UID: \"b179134f-d752-4779-bd5e-5cb469d25ac1\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922746 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-metrics-certs\") pod \"speaker-7kmfq\" (UID: 
\"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922764 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1bdd146d-6133-45a4-8257-db7a84e8950b-cert\") pod \"controller-7bb4cc7c98-m6wll\" (UID: \"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922782 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/80f8737c-71bf-4a64-964c-b902b649115c-frr-startup\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922797 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d5srq\" (UniqueName: \"kubernetes.io/projected/1bdd146d-6133-45a4-8257-db7a84e8950b-kube-api-access-d5srq\") pod \"controller-7bb4cc7c98-m6wll\" (UID: \"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922813 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a639575b-8878-4027-92b0-6ba4f66270bf-metallb-excludel2\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922842 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xkb6m\" (UniqueName: \"kubernetes.io/projected/b179134f-d752-4779-bd5e-5cb469d25ac1-kube-api-access-xkb6m\") pod \"frr-k8s-webhook-server-bcc4b6f68-95bvg\" (UID: \"b179134f-d752-4779-bd5e-5cb469d25ac1\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922895 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-metrics\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922909 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1bdd146d-6133-45a4-8257-db7a84e8950b-metrics-certs\") pod \"controller-7bb4cc7c98-m6wll\" (UID: \"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922924 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922941 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmlcr\" (UniqueName: \"kubernetes.io/projected/a639575b-8878-4027-92b0-6ba4f66270bf-kube-api-access-wmlcr\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") 
" pod="metallb-system/speaker-7kmfq" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922963 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glrhb\" (UniqueName: \"kubernetes.io/projected/80f8737c-71bf-4a64-964c-b902b649115c-kube-api-access-glrhb\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922979 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80f8737c-71bf-4a64-964c-b902b649115c-metrics-certs\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.922997 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-frr-sockets\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.923101 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-frr-conf\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.923319 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-frr-sockets\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.923357 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-reloader\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.923736 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/80f8737c-71bf-4a64-964c-b902b649115c-metrics\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: E0320 13:38:50.923946 4690 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Mar 20 13:38:50 crc kubenswrapper[4690]: E0320 13:38:50.924023 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/80f8737c-71bf-4a64-964c-b902b649115c-metrics-certs podName:80f8737c-71bf-4a64-964c-b902b649115c nodeName:}" failed. No retries permitted until 2026-03-20 13:38:51.424005182 +0000 UTC m=+977.713605125 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/80f8737c-71bf-4a64-964c-b902b649115c-metrics-certs") pod "frr-k8s-dp5pp" (UID: "80f8737c-71bf-4a64-964c-b902b649115c") : secret "frr-k8s-certs-secret" not found Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.924842 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/80f8737c-71bf-4a64-964c-b902b649115c-frr-startup\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.933930 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b179134f-d752-4779-bd5e-5cb469d25ac1-cert\") pod \"frr-k8s-webhook-server-bcc4b6f68-95bvg\" (UID: \"b179134f-d752-4779-bd5e-5cb469d25ac1\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.949462 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xkb6m\" (UniqueName: \"kubernetes.io/projected/b179134f-d752-4779-bd5e-5cb469d25ac1-kube-api-access-xkb6m\") pod \"frr-k8s-webhook-server-bcc4b6f68-95bvg\" (UID: \"b179134f-d752-4779-bd5e-5cb469d25ac1\") " pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:38:50 crc kubenswrapper[4690]: I0320 13:38:50.951249 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glrhb\" (UniqueName: \"kubernetes.io/projected/80f8737c-71bf-4a64-964c-b902b649115c-kube-api-access-glrhb\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.025545 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1bdd146d-6133-45a4-8257-db7a84e8950b-metrics-certs\") pod \"controller-7bb4cc7c98-m6wll\" (UID: \"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.026660 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.026743 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmlcr\" (UniqueName: \"kubernetes.io/projected/a639575b-8878-4027-92b0-6ba4f66270bf-kube-api-access-wmlcr\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.026872 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-metrics-certs\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.026955 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1bdd146d-6133-45a4-8257-db7a84e8950b-cert\") pod \"controller-7bb4cc7c98-m6wll\" (UID: 
\"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.027218 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a639575b-8878-4027-92b0-6ba4f66270bf-metallb-excludel2\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.027289 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d5srq\" (UniqueName: \"kubernetes.io/projected/1bdd146d-6133-45a4-8257-db7a84e8950b-kube-api-access-d5srq\") pod \"controller-7bb4cc7c98-m6wll\" (UID: \"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:51 crc kubenswrapper[4690]: E0320 13:38:51.026952 4690 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Mar 20 13:38:51 crc kubenswrapper[4690]: E0320 13:38:51.027415 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist podName:a639575b-8878-4027-92b0-6ba4f66270bf nodeName:}" failed. No retries permitted until 2026-03-20 13:38:51.527395112 +0000 UTC m=+977.816995065 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist") pod "speaker-7kmfq" (UID: "a639575b-8878-4027-92b0-6ba4f66270bf") : secret "metallb-memberlist" not found Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.027991 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/a639575b-8878-4027-92b0-6ba4f66270bf-metallb-excludel2\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.029430 4690 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.029752 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1bdd146d-6133-45a4-8257-db7a84e8950b-metrics-certs\") pod \"controller-7bb4cc7c98-m6wll\" (UID: \"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.031621 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-metrics-certs\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.041565 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1bdd146d-6133-45a4-8257-db7a84e8950b-cert\") pod \"controller-7bb4cc7c98-m6wll\" (UID: \"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.044683 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmlcr\" (UniqueName: \"kubernetes.io/projected/a639575b-8878-4027-92b0-6ba4f66270bf-kube-api-access-wmlcr\") 
pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.052657 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d5srq\" (UniqueName: \"kubernetes.io/projected/1bdd146d-6133-45a4-8257-db7a84e8950b-kube-api-access-d5srq\") pod \"controller-7bb4cc7c98-m6wll\" (UID: \"1bdd146d-6133-45a4-8257-db7a84e8950b\") " pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.053790 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.160963 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.293474 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg"] Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.374453 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-7bb4cc7c98-m6wll"] Mar 20 13:38:51 crc kubenswrapper[4690]: W0320 13:38:51.384401 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1bdd146d_6133_45a4_8257_db7a84e8950b.slice/crio-1c71c939c9c59cb45b9d6653dc96175e471e73a077c20e25f0292e67daea884a WatchSource:0}: Error finding container 1c71c939c9c59cb45b9d6653dc96175e471e73a077c20e25f0292e67daea884a: Status 404 returned error can't find the container with id 1c71c939c9c59cb45b9d6653dc96175e471e73a077c20e25f0292e67daea884a Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.433536 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80f8737c-71bf-4a64-964c-b902b649115c-metrics-certs\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.438256 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/80f8737c-71bf-4a64-964c-b902b649115c-metrics-certs\") pod \"frr-k8s-dp5pp\" (UID: \"80f8737c-71bf-4a64-964c-b902b649115c\") " pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.534690 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:51 crc kubenswrapper[4690]: E0320 13:38:51.535425 4690 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Mar 20 13:38:51 crc kubenswrapper[4690]: E0320 13:38:51.535620 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist podName:a639575b-8878-4027-92b0-6ba4f66270bf nodeName:}" failed. No retries permitted until 2026-03-20 13:38:52.535526803 +0000 UTC m=+978.825126786 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist") pod "speaker-7kmfq" (UID: "a639575b-8878-4027-92b0-6ba4f66270bf") : secret "metallb-memberlist" not found Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.644719 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.786228 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-7bb4cc7c98-m6wll" event={"ID":"1bdd146d-6133-45a4-8257-db7a84e8950b","Type":"ContainerStarted","Data":"4eaa325686082028f77195a1a752a080f693cd7011988c069f6eac6aa719bc5f"} Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.786677 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-7bb4cc7c98-m6wll" event={"ID":"1bdd146d-6133-45a4-8257-db7a84e8950b","Type":"ContainerStarted","Data":"405f46d0f2b18037a9a00ce0ec54b8fe628bad879138fd637da74891a3fdef8c"} Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.786713 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.786731 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-7bb4cc7c98-m6wll" event={"ID":"1bdd146d-6133-45a4-8257-db7a84e8950b","Type":"ContainerStarted","Data":"1c71c939c9c59cb45b9d6653dc96175e471e73a077c20e25f0292e67daea884a"} Mar 20 13:38:51 crc kubenswrapper[4690]: I0320 13:38:51.787602 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" event={"ID":"b179134f-d752-4779-bd5e-5cb469d25ac1","Type":"ContainerStarted","Data":"054fad9f9e7cf4def9c5466247a74ce96e8f183e85e5c1bd4ba7bc7ecd222cbf"} Mar 20 13:38:52 crc kubenswrapper[4690]: I0320 13:38:52.549778 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:52 crc kubenswrapper[4690]: I0320 13:38:52.563010 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/a639575b-8878-4027-92b0-6ba4f66270bf-memberlist\") pod \"speaker-7kmfq\" (UID: \"a639575b-8878-4027-92b0-6ba4f66270bf\") " pod="metallb-system/speaker-7kmfq" Mar 20 13:38:52 crc kubenswrapper[4690]: I0320 13:38:52.615562 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-7kmfq" Mar 20 13:38:52 crc kubenswrapper[4690]: I0320 13:38:52.795755 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7kmfq" event={"ID":"a639575b-8878-4027-92b0-6ba4f66270bf","Type":"ContainerStarted","Data":"550d934534d7584445cff8774fd2098457c2f32bb16a5fa1a9852212e4e2e66e"} Mar 20 13:38:52 crc kubenswrapper[4690]: I0320 13:38:52.797206 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerStarted","Data":"9e8f23b145c7c08169b1f7d8e6c1d0949bd9336aa7031242a950daec3e4c5b37"} Mar 20 13:38:53 crc kubenswrapper[4690]: I0320 13:38:53.805044 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7kmfq" event={"ID":"a639575b-8878-4027-92b0-6ba4f66270bf","Type":"ContainerStarted","Data":"ceccee0c7f44566c1ad4b4fc868c1e292ecac8a3dc334d492967d7068191f6b2"} Mar 20 13:38:53 crc kubenswrapper[4690]: I0320 13:38:53.805393 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-7kmfq" event={"ID":"a639575b-8878-4027-92b0-6ba4f66270bf","Type":"ContainerStarted","Data":"3c08143e2d953f5b9b0c19b75b8cf31ac7bfac7bb67e1d75f5bbdcbbf2f2b1ed"} Mar 20 13:38:53 crc kubenswrapper[4690]: I0320 13:38:53.805429 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-7kmfq" Mar 20 13:38:53 crc kubenswrapper[4690]: I0320 13:38:53.833084 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-7kmfq" podStartSLOduration=3.83306772 podStartE2EDuration="3.83306772s" podCreationTimestamp="2026-03-20 13:38:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:38:53.832082922 +0000 UTC m=+980.121682865" watchObservedRunningTime="2026-03-20 13:38:53.83306772 +0000 UTC m=+980.122667663" Mar 20 13:38:53 crc kubenswrapper[4690]: I0320 13:38:53.840477 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-7bb4cc7c98-m6wll" podStartSLOduration=3.84046118 podStartE2EDuration="3.84046118s" podCreationTimestamp="2026-03-20 13:38:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:38:51.810704028 +0000 UTC m=+978.100303981" watchObservedRunningTime="2026-03-20 13:38:53.84046118 +0000 UTC m=+980.130061123" Mar 20 13:38:59 crc kubenswrapper[4690]: I0320 13:38:59.850629 4690 generic.go:334] "Generic (PLEG): container finished" podID="80f8737c-71bf-4a64-964c-b902b649115c" containerID="2b0c8b90e0bb7affb595b9df74a1fcf17cf04196495b7e469ded23084756dfa8" exitCode=0 Mar 20 13:38:59 crc kubenswrapper[4690]: I0320 13:38:59.850737 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerDied","Data":"2b0c8b90e0bb7affb595b9df74a1fcf17cf04196495b7e469ded23084756dfa8"} Mar 20 13:38:59 crc kubenswrapper[4690]: I0320 13:38:59.853823 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" event={"ID":"b179134f-d752-4779-bd5e-5cb469d25ac1","Type":"ContainerStarted","Data":"1b2d7ba83527786e0be0c229e283f5d9d7f635786ac19c97658c39f92b6afa51"} Mar 20 13:38:59 crc kubenswrapper[4690]: I0320 13:38:59.854108 4690 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:39:00 crc kubenswrapper[4690]: I0320 13:39:00.865599 4690 generic.go:334] "Generic (PLEG): container finished" podID="80f8737c-71bf-4a64-964c-b902b649115c" containerID="8b22fabf6c1e7b9aa64baec4f187746f7870c1aad17bc8c204abb289a4906a0e" exitCode=0 Mar 20 13:39:00 crc kubenswrapper[4690]: I0320 13:39:00.865693 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerDied","Data":"8b22fabf6c1e7b9aa64baec4f187746f7870c1aad17bc8c204abb289a4906a0e"} Mar 20 13:39:00 crc kubenswrapper[4690]: I0320 13:39:00.927183 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" podStartSLOduration=3.540643584 podStartE2EDuration="10.927151209s" podCreationTimestamp="2026-03-20 13:38:50 +0000 UTC" firstStartedPulling="2026-03-20 13:38:51.30157542 +0000 UTC m=+977.591175373" lastFinishedPulling="2026-03-20 13:38:58.688083055 +0000 UTC m=+984.977682998" observedRunningTime="2026-03-20 13:38:59.905817645 +0000 UTC m=+986.195417628" watchObservedRunningTime="2026-03-20 13:39:00.927151209 +0000 UTC m=+987.216751192" Mar 20 13:39:01 crc kubenswrapper[4690]: I0320 13:39:01.165243 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-7bb4cc7c98-m6wll" Mar 20 13:39:01 crc kubenswrapper[4690]: I0320 13:39:01.878521 4690 generic.go:334] "Generic (PLEG): container finished" podID="80f8737c-71bf-4a64-964c-b902b649115c" containerID="669a6f60bbcf09faf3293d99a4f3c9f6104d846770e3758573c8f1da842dcc6f" exitCode=0 Mar 20 13:39:01 crc kubenswrapper[4690]: I0320 13:39:01.879345 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerDied","Data":"669a6f60bbcf09faf3293d99a4f3c9f6104d846770e3758573c8f1da842dcc6f"} Mar 20 13:39:02 crc kubenswrapper[4690]: I0320 13:39:02.621348 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-7kmfq" Mar 20 13:39:02 crc kubenswrapper[4690]: I0320 13:39:02.894416 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerStarted","Data":"fe5e3b64318b28a36f546f81d368f9fc5e74183d2940ff7f9331c3ee56ba4d90"} Mar 20 13:39:02 crc kubenswrapper[4690]: I0320 13:39:02.894463 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerStarted","Data":"3af65d5f7ff9c319d6b2af8bbebd7b07db3f839d77cd5ab4e7b11e00470b9e0a"} Mar 20 13:39:02 crc kubenswrapper[4690]: I0320 13:39:02.894478 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerStarted","Data":"52fc3f5d7793054ac3ef6daf85308dbd6bd3d8371027c1bbcf0ccb38ed3b2496"} Mar 20 13:39:02 crc kubenswrapper[4690]: I0320 13:39:02.894493 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerStarted","Data":"c986b22b7fd6cba391aaecf7ec773de5b1f1ba3d4872aa516fa206433a46447e"} Mar 20 13:39:02 crc kubenswrapper[4690]: I0320 13:39:02.894505 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerStarted","Data":"d955759e97af356eae8b7d4184e1b8282db358064b5404a57f35f97a7b738291"} Mar 20 13:39:03 crc kubenswrapper[4690]: I0320 13:39:03.906248 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-dp5pp" event={"ID":"80f8737c-71bf-4a64-964c-b902b649115c","Type":"ContainerStarted","Data":"04dcb71cd882b7f893ce23eab7217efd81692de18bcc38b0333a1b97ba679e52"} Mar 20 13:39:03 crc kubenswrapper[4690]: I0320 13:39:03.906681 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:39:03 crc kubenswrapper[4690]: I0320 13:39:03.946134 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-dp5pp" podStartSLOduration=7.047222172 podStartE2EDuration="13.946111941s" podCreationTimestamp="2026-03-20 13:38:50 +0000 UTC" firstStartedPulling="2026-03-20 13:38:51.802965348 +0000 UTC m=+978.092565301" lastFinishedPulling="2026-03-20 13:38:58.701855127 +0000 UTC m=+984.991455070" observedRunningTime="2026-03-20 13:39:03.942460278 +0000 UTC m=+990.232060251" watchObservedRunningTime="2026-03-20 13:39:03.946111941 +0000 UTC m=+990.235711924" Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.431510 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-6htpc"] Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.433120 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6htpc" Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.435767 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-zn9jw" Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.436008 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.436070 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.466891 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6htpc"] Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.555572 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br5w8\" (UniqueName: \"kubernetes.io/projected/df8b4e1b-f399-46c3-96c9-1c6d1c713415-kube-api-access-br5w8\") pod \"openstack-operator-index-6htpc\" (UID: \"df8b4e1b-f399-46c3-96c9-1c6d1c713415\") " pod="openstack-operators/openstack-operator-index-6htpc" Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.657146 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br5w8\" (UniqueName: \"kubernetes.io/projected/df8b4e1b-f399-46c3-96c9-1c6d1c713415-kube-api-access-br5w8\") pod \"openstack-operator-index-6htpc\" (UID: \"df8b4e1b-f399-46c3-96c9-1c6d1c713415\") " pod="openstack-operators/openstack-operator-index-6htpc" Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.699579 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br5w8\" (UniqueName: \"kubernetes.io/projected/df8b4e1b-f399-46c3-96c9-1c6d1c713415-kube-api-access-br5w8\") pod \"openstack-operator-index-6htpc\" (UID: 
\"df8b4e1b-f399-46c3-96c9-1c6d1c713415\") " pod="openstack-operators/openstack-operator-index-6htpc" Mar 20 13:39:05 crc kubenswrapper[4690]: I0320 13:39:05.770804 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6htpc" Mar 20 13:39:06 crc kubenswrapper[4690]: I0320 13:39:06.177061 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-6htpc"] Mar 20 13:39:06 crc kubenswrapper[4690]: I0320 13:39:06.645620 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:39:06 crc kubenswrapper[4690]: I0320 13:39:06.717823 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:39:06 crc kubenswrapper[4690]: I0320 13:39:06.922960 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6htpc" event={"ID":"df8b4e1b-f399-46c3-96c9-1c6d1c713415","Type":"ContainerStarted","Data":"d0592bb5566d32d1ffbbadf915a9661b27d73f827bd016a3bf7c53de19b7cccb"} Mar 20 13:39:08 crc kubenswrapper[4690]: I0320 13:39:08.801981 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-6htpc"] Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.417493 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-7rrdk"] Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.419010 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.438367 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7rrdk"] Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.513293 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mtp2\" (UniqueName: \"kubernetes.io/projected/d762a7d0-5c65-4d5c-8e36-5d271d27f231-kube-api-access-6mtp2\") pod \"openstack-operator-index-7rrdk\" (UID: \"d762a7d0-5c65-4d5c-8e36-5d271d27f231\") " pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.614898 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mtp2\" (UniqueName: \"kubernetes.io/projected/d762a7d0-5c65-4d5c-8e36-5d271d27f231-kube-api-access-6mtp2\") pod \"openstack-operator-index-7rrdk\" (UID: \"d762a7d0-5c65-4d5c-8e36-5d271d27f231\") " pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.646553 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mtp2\" (UniqueName: \"kubernetes.io/projected/d762a7d0-5c65-4d5c-8e36-5d271d27f231-kube-api-access-6mtp2\") pod \"openstack-operator-index-7rrdk\" (UID: \"d762a7d0-5c65-4d5c-8e36-5d271d27f231\") " pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.746499 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.943067 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6htpc" event={"ID":"df8b4e1b-f399-46c3-96c9-1c6d1c713415","Type":"ContainerStarted","Data":"c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0"} Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.943640 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-6htpc" podUID="df8b4e1b-f399-46c3-96c9-1c6d1c713415" containerName="registry-server" containerID="cri-o://c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0" gracePeriod=2 Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.971809 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-6htpc" podStartSLOduration=2.49946989 podStartE2EDuration="4.971774746s" podCreationTimestamp="2026-03-20 13:39:05 +0000 UTC" firstStartedPulling="2026-03-20 13:39:06.178727151 +0000 UTC m=+992.468327094" lastFinishedPulling="2026-03-20 13:39:08.651032007 +0000 UTC m=+994.940631950" observedRunningTime="2026-03-20 13:39:09.961611997 +0000 UTC m=+996.251211980" watchObservedRunningTime="2026-03-20 13:39:09.971774746 +0000 UTC m=+996.261374739" Mar 20 13:39:09 crc kubenswrapper[4690]: I0320 13:39:09.991051 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-7rrdk"] Mar 20 13:39:10 crc kubenswrapper[4690]: W0320 13:39:10.033318 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd762a7d0_5c65_4d5c_8e36_5d271d27f231.slice/crio-1fa32880076a3545a0f1026761907b6efa0a8dfc94b9f993ac3210cd5ddafa53 WatchSource:0}: Error finding container 1fa32880076a3545a0f1026761907b6efa0a8dfc94b9f993ac3210cd5ddafa53: Status 404 returned error can't find the container with id 1fa32880076a3545a0f1026761907b6efa0a8dfc94b9f993ac3210cd5ddafa53 Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.269910 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6htpc" Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.324412 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br5w8\" (UniqueName: \"kubernetes.io/projected/df8b4e1b-f399-46c3-96c9-1c6d1c713415-kube-api-access-br5w8\") pod \"df8b4e1b-f399-46c3-96c9-1c6d1c713415\" (UID: \"df8b4e1b-f399-46c3-96c9-1c6d1c713415\") " Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.333936 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df8b4e1b-f399-46c3-96c9-1c6d1c713415-kube-api-access-br5w8" (OuterVolumeSpecName: "kube-api-access-br5w8") pod "df8b4e1b-f399-46c3-96c9-1c6d1c713415" (UID: "df8b4e1b-f399-46c3-96c9-1c6d1c713415"). InnerVolumeSpecName "kube-api-access-br5w8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.426704 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br5w8\" (UniqueName: \"kubernetes.io/projected/df8b4e1b-f399-46c3-96c9-1c6d1c713415-kube-api-access-br5w8\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.954774 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7rrdk" event={"ID":"d762a7d0-5c65-4d5c-8e36-5d271d27f231","Type":"ContainerStarted","Data":"16fa10060856446734fb11e2179db484a99561ae000ad26290be3104b3f24a41"} Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.954824 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-7rrdk" event={"ID":"d762a7d0-5c65-4d5c-8e36-5d271d27f231","Type":"ContainerStarted","Data":"1fa32880076a3545a0f1026761907b6efa0a8dfc94b9f993ac3210cd5ddafa53"} Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.963680 4690 generic.go:334] "Generic (PLEG): container finished" podID="df8b4e1b-f399-46c3-96c9-1c6d1c713415" containerID="c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0" exitCode=0 Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.963771 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-6htpc" Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.963804 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6htpc" event={"ID":"df8b4e1b-f399-46c3-96c9-1c6d1c713415","Type":"ContainerDied","Data":"c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0"} Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.963884 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-6htpc" event={"ID":"df8b4e1b-f399-46c3-96c9-1c6d1c713415","Type":"ContainerDied","Data":"d0592bb5566d32d1ffbbadf915a9661b27d73f827bd016a3bf7c53de19b7cccb"} Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.963915 4690 scope.go:117] "RemoveContainer" containerID="c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0" Mar 20 13:39:10 crc kubenswrapper[4690]: I0320 13:39:10.986623 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-7rrdk" podStartSLOduration=1.927874266 podStartE2EDuration="1.986594256s" podCreationTimestamp="2026-03-20 13:39:09 +0000 UTC" firstStartedPulling="2026-03-20 13:39:10.037997859 +0000 UTC m=+996.327597802" lastFinishedPulling="2026-03-20 13:39:10.096717849 +0000 UTC m=+996.386317792" observedRunningTime="2026-03-20 13:39:10.983617491 +0000 UTC m=+997.273217444" watchObservedRunningTime="2026-03-20 13:39:10.986594256 +0000 UTC m=+997.276194249" Mar 20 13:39:11 crc kubenswrapper[4690]: I0320 13:39:11.020430 4690 scope.go:117] "RemoveContainer" containerID="c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0" Mar 20 13:39:11 crc kubenswrapper[4690]: E0320 13:39:11.021291 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0\": container with ID starting with c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0 not found: ID does not exist" containerID="c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0" 
Mar 20 13:39:11 crc kubenswrapper[4690]: I0320 13:39:11.021532 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0"} err="failed to get container status \"c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0\": rpc error: code = NotFound desc = could not find container \"c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0\": container with ID starting with c6728272aef48a30df71bfc2f385480d05186ca78bf57ca6bd321a24530944e0 not found: ID does not exist" Mar 20 13:39:11 crc kubenswrapper[4690]: I0320 13:39:11.021872 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-6htpc"] Mar 20 13:39:11 crc kubenswrapper[4690]: I0320 13:39:11.031744 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-6htpc"] Mar 20 13:39:11 crc kubenswrapper[4690]: I0320 13:39:11.061095 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-bcc4b6f68-95bvg" Mar 20 13:39:12 crc kubenswrapper[4690]: I0320 13:39:12.424609 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df8b4e1b-f399-46c3-96c9-1c6d1c713415" path="/var/lib/kubelet/pods/df8b4e1b-f399-46c3-96c9-1c6d1c713415/volumes" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.827805 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bttfl"] Mar 20 13:39:13 crc kubenswrapper[4690]: E0320 13:39:13.829665 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df8b4e1b-f399-46c3-96c9-1c6d1c713415" containerName="registry-server" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.829838 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="df8b4e1b-f399-46c3-96c9-1c6d1c713415" containerName="registry-server" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.830132 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="df8b4e1b-f399-46c3-96c9-1c6d1c713415" containerName="registry-server" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.831353 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.846350 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bttfl"] Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.875029 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mjhg2\" (UniqueName: \"kubernetes.io/projected/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-kube-api-access-mjhg2\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.875114 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-utilities\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.875347 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-catalog-content\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.976476 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mjhg2\" (UniqueName: \"kubernetes.io/projected/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-kube-api-access-mjhg2\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.976571 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-utilities\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.976624 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-catalog-content\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.977109 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-catalog-content\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:13 crc kubenswrapper[4690]: I0320 13:39:13.977351 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-utilities\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:14 crc kubenswrapper[4690]: I0320 13:39:13.998885 4690 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-mjhg2\" (UniqueName: \"kubernetes.io/projected/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-kube-api-access-mjhg2\") pod \"redhat-marketplace-bttfl\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:14 crc kubenswrapper[4690]: I0320 13:39:14.165563 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:14 crc kubenswrapper[4690]: I0320 13:39:14.394830 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bttfl"] Mar 20 13:39:14 crc kubenswrapper[4690]: W0320 13:39:14.402170 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8ad30af5_f9b5_418a_8ec6_35cb6c0dc61f.slice/crio-60ff5d8982dd37f632663439056ffa3972b9bb730b1bcf84ef24e298f8502cc7 WatchSource:0}: Error finding container 60ff5d8982dd37f632663439056ffa3972b9bb730b1bcf84ef24e298f8502cc7: Status 404 returned error can't find the container with id 60ff5d8982dd37f632663439056ffa3972b9bb730b1bcf84ef24e298f8502cc7 Mar 20 13:39:15 crc kubenswrapper[4690]: I0320 13:39:15.011214 4690 generic.go:334] "Generic (PLEG): container finished" podID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerID="d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95" exitCode=0 Mar 20 13:39:15 crc kubenswrapper[4690]: I0320 13:39:15.012002 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bttfl" event={"ID":"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f","Type":"ContainerDied","Data":"d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95"} Mar 20 13:39:15 crc kubenswrapper[4690]: I0320 13:39:15.012148 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bttfl" event={"ID":"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f","Type":"ContainerStarted","Data":"60ff5d8982dd37f632663439056ffa3972b9bb730b1bcf84ef24e298f8502cc7"} Mar 20 13:39:16 crc kubenswrapper[4690]: I0320 13:39:16.021913 4690 generic.go:334] "Generic (PLEG): container finished" podID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerID="8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8" exitCode=0 Mar 20 13:39:16 crc kubenswrapper[4690]: I0320 13:39:16.022033 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bttfl" event={"ID":"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f","Type":"ContainerDied","Data":"8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8"} Mar 20 13:39:17 crc kubenswrapper[4690]: I0320 13:39:17.034995 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bttfl" event={"ID":"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f","Type":"ContainerStarted","Data":"1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c"} Mar 20 13:39:17 crc kubenswrapper[4690]: I0320 13:39:17.066361 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bttfl" podStartSLOduration=2.625210527 podStartE2EDuration="4.06633583s" podCreationTimestamp="2026-03-20 13:39:13 +0000 UTC" firstStartedPulling="2026-03-20 13:39:15.018691529 +0000 UTC m=+1001.308291482" lastFinishedPulling="2026-03-20 13:39:16.459816812 +0000 UTC m=+1002.749416785" observedRunningTime="2026-03-20 13:39:17.059923078 +0000 UTC m=+1003.349523091" 
watchObservedRunningTime="2026-03-20 13:39:17.06633583 +0000 UTC m=+1003.355935813" Mar 20 13:39:19 crc kubenswrapper[4690]: I0320 13:39:19.747707 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:19 crc kubenswrapper[4690]: I0320 13:39:19.748251 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:19 crc kubenswrapper[4690]: I0320 13:39:19.791927 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:20 crc kubenswrapper[4690]: I0320 13:39:20.095102 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-7rrdk" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.072525 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht"] Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.075193 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.078693 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-nwgqf" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.091398 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht"] Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.177401 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzxv4\" (UniqueName: \"kubernetes.io/projected/7ce1f851-f997-4617-a88f-36f2d7ca6f51-kube-api-access-fzxv4\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.177452 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-bundle\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.177878 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-util\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.279092 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzxv4\" (UniqueName: \"kubernetes.io/projected/7ce1f851-f997-4617-a88f-36f2d7ca6f51-kube-api-access-fzxv4\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " 
pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.279194 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-bundle\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.279332 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-util\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.280335 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-bundle\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.280412 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-util\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.318291 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzxv4\" (UniqueName: \"kubernetes.io/projected/7ce1f851-f997-4617-a88f-36f2d7ca6f51-kube-api-access-fzxv4\") pod \"1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.413655 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.651473 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-dp5pp" Mar 20 13:39:21 crc kubenswrapper[4690]: I0320 13:39:21.956797 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht"] Mar 20 13:39:21 crc kubenswrapper[4690]: W0320 13:39:21.964472 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ce1f851_f997_4617_a88f_36f2d7ca6f51.slice/crio-6e21c23e52b29ecea659f10f83148b90116ed265be416c53ab91ae4bc27a706f WatchSource:0}: Error finding container 6e21c23e52b29ecea659f10f83148b90116ed265be416c53ab91ae4bc27a706f: Status 404 returned error can't find the container with id 6e21c23e52b29ecea659f10f83148b90116ed265be416c53ab91ae4bc27a706f Mar 20 13:39:22 crc kubenswrapper[4690]: I0320 13:39:22.069117 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" event={"ID":"7ce1f851-f997-4617-a88f-36f2d7ca6f51","Type":"ContainerStarted","Data":"6e21c23e52b29ecea659f10f83148b90116ed265be416c53ab91ae4bc27a706f"} Mar 20 13:39:23 crc kubenswrapper[4690]: I0320 13:39:23.076552 4690 generic.go:334] "Generic (PLEG): container finished" podID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerID="c00b02c241ca7a24b83706193f33cbce79c2f5135cb4d92ab18bd7c70f8fbb1a" exitCode=0 Mar 20 13:39:23 crc kubenswrapper[4690]: I0320 13:39:23.076627 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" event={"ID":"7ce1f851-f997-4617-a88f-36f2d7ca6f51","Type":"ContainerDied","Data":"c00b02c241ca7a24b83706193f33cbce79c2f5135cb4d92ab18bd7c70f8fbb1a"} Mar 20 13:39:24 crc kubenswrapper[4690]: I0320 13:39:24.090267 4690 generic.go:334] "Generic (PLEG): container finished" podID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerID="a6e34255e598f5fda684f9c0f7c2a9dff6b985c332e65ef840d33c1a2a7f232b" exitCode=0 Mar 20 13:39:24 crc kubenswrapper[4690]: I0320 13:39:24.090707 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" event={"ID":"7ce1f851-f997-4617-a88f-36f2d7ca6f51","Type":"ContainerDied","Data":"a6e34255e598f5fda684f9c0f7c2a9dff6b985c332e65ef840d33c1a2a7f232b"} Mar 20 13:39:24 crc kubenswrapper[4690]: I0320 13:39:24.166634 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:24 crc kubenswrapper[4690]: I0320 13:39:24.167091 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:24 crc kubenswrapper[4690]: I0320 13:39:24.231599 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:25 crc kubenswrapper[4690]: I0320 13:39:25.102207 4690 generic.go:334] "Generic (PLEG): container finished" podID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerID="08ff93c675e8d5ae2874ec579b928b37cd228dbf08bb4428ab0f693f38a77570" exitCode=0 Mar 20 13:39:25 crc kubenswrapper[4690]: I0320 13:39:25.102272 4690 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" event={"ID":"7ce1f851-f997-4617-a88f-36f2d7ca6f51","Type":"ContainerDied","Data":"08ff93c675e8d5ae2874ec579b928b37cd228dbf08bb4428ab0f693f38a77570"} Mar 20 13:39:25 crc kubenswrapper[4690]: I0320 13:39:25.169714 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.461799 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.566019 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-bundle\") pod \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.566131 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-util\") pod \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.566429 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzxv4\" (UniqueName: \"kubernetes.io/projected/7ce1f851-f997-4617-a88f-36f2d7ca6f51-kube-api-access-fzxv4\") pod \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\" (UID: \"7ce1f851-f997-4617-a88f-36f2d7ca6f51\") " Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.567998 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-bundle" (OuterVolumeSpecName: "bundle") pod "7ce1f851-f997-4617-a88f-36f2d7ca6f51" (UID: "7ce1f851-f997-4617-a88f-36f2d7ca6f51"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.568219 4690 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.574544 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ce1f851-f997-4617-a88f-36f2d7ca6f51-kube-api-access-fzxv4" (OuterVolumeSpecName: "kube-api-access-fzxv4") pod "7ce1f851-f997-4617-a88f-36f2d7ca6f51" (UID: "7ce1f851-f997-4617-a88f-36f2d7ca6f51"). InnerVolumeSpecName "kube-api-access-fzxv4". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.584763 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-util" (OuterVolumeSpecName: "util") pod "7ce1f851-f997-4617-a88f-36f2d7ca6f51" (UID: "7ce1f851-f997-4617-a88f-36f2d7ca6f51"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.669913 4690 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/7ce1f851-f997-4617-a88f-36f2d7ca6f51-util\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:26 crc kubenswrapper[4690]: I0320 13:39:26.669945 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzxv4\" (UniqueName: \"kubernetes.io/projected/7ce1f851-f997-4617-a88f-36f2d7ca6f51-kube-api-access-fzxv4\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.123408 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" event={"ID":"7ce1f851-f997-4617-a88f-36f2d7ca6f51","Type":"ContainerDied","Data":"6e21c23e52b29ecea659f10f83148b90116ed265be416c53ab91ae4bc27a706f"} Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.123455 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e21c23e52b29ecea659f10f83148b90116ed265be416c53ab91ae4bc27a706f" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.123548 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.404206 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bttfl"] Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.404920 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bttfl" podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerName="registry-server" containerID="cri-o://1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c" gracePeriod=2 Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.834661 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.892010 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mjhg2\" (UniqueName: \"kubernetes.io/projected/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-kube-api-access-mjhg2\") pod \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.892254 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-utilities\") pod \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.892402 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-catalog-content\") pod \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\" (UID: \"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f\") " Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.894321 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-utilities" (OuterVolumeSpecName: "utilities") pod "8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" (UID: "8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.899241 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-kube-api-access-mjhg2" (OuterVolumeSpecName: "kube-api-access-mjhg2") pod "8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" (UID: "8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f"). InnerVolumeSpecName "kube-api-access-mjhg2". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.949014 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" (UID: "8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.995605 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.995655 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mjhg2\" (UniqueName: \"kubernetes.io/projected/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-kube-api-access-mjhg2\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:27 crc kubenswrapper[4690]: I0320 13:39:27.995675 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.130979 4690 generic.go:334] "Generic (PLEG): container finished" podID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerID="1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c" exitCode=0 Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.131027 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bttfl" event={"ID":"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f","Type":"ContainerDied","Data":"1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c"} Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.131060 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bttfl" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.131094 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bttfl" event={"ID":"8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f","Type":"ContainerDied","Data":"60ff5d8982dd37f632663439056ffa3972b9bb730b1bcf84ef24e298f8502cc7"} Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.131121 4690 scope.go:117] "RemoveContainer" containerID="1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.148237 4690 scope.go:117] "RemoveContainer" containerID="8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.160407 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bttfl"] Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.167392 4690 scope.go:117] "RemoveContainer" containerID="d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.169428 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bttfl"] Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.189674 4690 scope.go:117] "RemoveContainer" containerID="1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c" Mar 20 13:39:28 crc kubenswrapper[4690]: E0320 13:39:28.190173 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c\": container with ID starting with 1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c not found: ID does not exist" containerID="1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.190237 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c"} err="failed to get container status \"1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c\": rpc error: code = NotFound desc = could not find container \"1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c\": container with ID starting with 1a88d73716356fdf21141dba7cee35925a3a56e235ecad7d3efad5bec4d77d7c not found: ID does not exist" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.190264 4690 scope.go:117] "RemoveContainer" containerID="8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8" Mar 20 13:39:28 crc kubenswrapper[4690]: E0320 13:39:28.190666 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8\": container with ID starting with 8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8 not found: ID does not exist" containerID="8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.190715 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8"} err="failed to get container status \"8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8\": rpc error: code = NotFound desc = could not find 
container \"8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8\": container with ID starting with 8a35f981da8fdf63fc81beb5fb020eac9eee4634096e2f1c0b3f703ec294afd8 not found: ID does not exist" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.190744 4690 scope.go:117] "RemoveContainer" containerID="d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95" Mar 20 13:39:28 crc kubenswrapper[4690]: E0320 13:39:28.191239 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95\": container with ID starting with d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95 not found: ID does not exist" containerID="d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.191287 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95"} err="failed to get container status \"d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95\": rpc error: code = NotFound desc = could not find container \"d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95\": container with ID starting with d15301c1f31b9a9f83064e09b8c607730f186bd80668b678ddabd89c3d127e95 not found: ID does not exist" Mar 20 13:39:28 crc kubenswrapper[4690]: I0320 13:39:28.423534 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" path="/var/lib/kubelet/pods/8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f/volumes" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.438464 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-init-59b5998766-5npwc"] Mar 20 13:39:31 crc kubenswrapper[4690]: E0320 13:39:31.439874 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerName="util" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.439890 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerName="util" Mar 20 13:39:31 crc kubenswrapper[4690]: E0320 13:39:31.439904 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerName="pull" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.439912 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerName="pull" Mar 20 13:39:31 crc kubenswrapper[4690]: E0320 13:39:31.439919 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerName="extract-utilities" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.439927 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerName="extract-utilities" Mar 20 13:39:31 crc kubenswrapper[4690]: E0320 13:39:31.439939 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerName="extract" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.439946 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerName="extract" Mar 20 13:39:31 crc kubenswrapper[4690]: E0320 13:39:31.439962 4690 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerName="registry-server" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.439969 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerName="registry-server" Mar 20 13:39:31 crc kubenswrapper[4690]: E0320 13:39:31.439984 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerName="extract-content" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.439991 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerName="extract-content" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.440122 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ad30af5-f9b5-418a-8ec6-35cb6c0dc61f" containerName="registry-server" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.440140 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ce1f851-f997-4617-a88f-36f2d7ca6f51" containerName="extract" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.440684 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" Mar 20 13:39:31 crc kubenswrapper[4690]: W0320 13:39:31.442331 4690 reflector.go:561] object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-fzl6b": failed to list *v1.Secret: secrets "openstack-operator-controller-init-dockercfg-fzl6b" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack-operators": no relationship found between node 'crc' and this object Mar 20 13:39:31 crc kubenswrapper[4690]: E0320 13:39:31.442371 4690 reflector.go:158] "Unhandled Error" err="object-\"openstack-operators\"/\"openstack-operator-controller-init-dockercfg-fzl6b\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openstack-operator-controller-init-dockercfg-fzl6b\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack-operators\": no relationship found between node 'crc' and this object" logger="UnhandledError" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.476292 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59b5998766-5npwc"] Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.547316 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnksg\" (UniqueName: \"kubernetes.io/projected/749b59ec-5f9a-41b3-a48c-c5746c3d0b43-kube-api-access-vnksg\") pod \"openstack-operator-controller-init-59b5998766-5npwc\" (UID: \"749b59ec-5f9a-41b3-a48c-c5746c3d0b43\") " pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.648311 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnksg\" (UniqueName: \"kubernetes.io/projected/749b59ec-5f9a-41b3-a48c-c5746c3d0b43-kube-api-access-vnksg\") pod \"openstack-operator-controller-init-59b5998766-5npwc\" (UID: \"749b59ec-5f9a-41b3-a48c-c5746c3d0b43\") " pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" Mar 20 13:39:31 crc kubenswrapper[4690]: I0320 13:39:31.668915 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnksg\" 
(UniqueName: \"kubernetes.io/projected/749b59ec-5f9a-41b3-a48c-c5746c3d0b43-kube-api-access-vnksg\") pod \"openstack-operator-controller-init-59b5998766-5npwc\" (UID: \"749b59ec-5f9a-41b3-a48c-c5746c3d0b43\") " pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" Mar 20 13:39:32 crc kubenswrapper[4690]: I0320 13:39:32.709721 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-init-dockercfg-fzl6b" Mar 20 13:39:32 crc kubenswrapper[4690]: I0320 13:39:32.710271 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" Mar 20 13:39:32 crc kubenswrapper[4690]: I0320 13:39:32.940417 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-init-59b5998766-5npwc"] Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.752226 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" event={"ID":"749b59ec-5f9a-41b3-a48c-c5746c3d0b43","Type":"ContainerStarted","Data":"018bd5a34b5a4900a7fe13b61943ec42a1d77b704b8ca1ec59d20b2f77c57d86"} Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.816385 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-chjsr"] Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.821474 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.822399 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-chjsr"] Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.880239 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-catalog-content\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.880302 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqkzp\" (UniqueName: \"kubernetes.io/projected/7f2e880a-9059-4659-9ba2-9b8990dc8f76-kube-api-access-lqkzp\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.880338 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-utilities\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.981604 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-catalog-content\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.981671 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-lqkzp\" (UniqueName: \"kubernetes.io/projected/7f2e880a-9059-4659-9ba2-9b8990dc8f76-kube-api-access-lqkzp\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.981722 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-utilities\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.982286 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-utilities\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:33 crc kubenswrapper[4690]: I0320 13:39:33.982577 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-catalog-content\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:34 crc kubenswrapper[4690]: I0320 13:39:34.007760 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqkzp\" (UniqueName: \"kubernetes.io/projected/7f2e880a-9059-4659-9ba2-9b8990dc8f76-kube-api-access-lqkzp\") pod \"certified-operators-chjsr\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:34 crc kubenswrapper[4690]: I0320 13:39:34.153226 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:34 crc kubenswrapper[4690]: I0320 13:39:34.434091 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-chjsr"] Mar 20 13:39:36 crc kubenswrapper[4690]: I0320 13:39:36.770068 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chjsr" event={"ID":"7f2e880a-9059-4659-9ba2-9b8990dc8f76","Type":"ContainerStarted","Data":"fbcc85b7eb413a49b660f070f489e378c0a803c343e130bd20035ad2d75487d7"} Mar 20 13:39:37 crc kubenswrapper[4690]: I0320 13:39:37.784042 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" event={"ID":"749b59ec-5f9a-41b3-a48c-c5746c3d0b43","Type":"ContainerStarted","Data":"c624bfcc3271d2cebf8acb4d37e887b1efc2ed47e27411a241f3f9e90d5f8e06"} Mar 20 13:39:37 crc kubenswrapper[4690]: I0320 13:39:37.784223 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" Mar 20 13:39:37 crc kubenswrapper[4690]: I0320 13:39:37.787732 4690 generic.go:334] "Generic (PLEG): container finished" podID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerID="28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d" exitCode=0 Mar 20 13:39:37 crc kubenswrapper[4690]: I0320 13:39:37.787785 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chjsr" event={"ID":"7f2e880a-9059-4659-9ba2-9b8990dc8f76","Type":"ContainerDied","Data":"28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d"} Mar 20 13:39:37 crc kubenswrapper[4690]: I0320 13:39:37.838625 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" podStartSLOduration=2.720529247 podStartE2EDuration="6.838594705s" podCreationTimestamp="2026-03-20 13:39:31 +0000 UTC" firstStartedPulling="2026-03-20 13:39:32.951001973 +0000 UTC m=+1019.240601916" lastFinishedPulling="2026-03-20 13:39:37.069067431 +0000 UTC m=+1023.358667374" observedRunningTime="2026-03-20 13:39:37.836896046 +0000 UTC m=+1024.126495989" watchObservedRunningTime="2026-03-20 13:39:37.838594705 +0000 UTC m=+1024.128194678" Mar 20 13:39:38 crc kubenswrapper[4690]: I0320 13:39:38.795619 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chjsr" event={"ID":"7f2e880a-9059-4659-9ba2-9b8990dc8f76","Type":"ContainerStarted","Data":"e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53"} Mar 20 13:39:39 crc kubenswrapper[4690]: I0320 13:39:39.807583 4690 generic.go:334] "Generic (PLEG): container finished" podID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerID="e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53" exitCode=0 Mar 20 13:39:39 crc kubenswrapper[4690]: I0320 13:39:39.807659 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chjsr" event={"ID":"7f2e880a-9059-4659-9ba2-9b8990dc8f76","Type":"ContainerDied","Data":"e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53"} Mar 20 13:39:40 crc kubenswrapper[4690]: I0320 13:39:40.816582 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chjsr" 
event={"ID":"7f2e880a-9059-4659-9ba2-9b8990dc8f76","Type":"ContainerStarted","Data":"8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8"} Mar 20 13:39:40 crc kubenswrapper[4690]: I0320 13:39:40.837329 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-chjsr" podStartSLOduration=5.412562607 podStartE2EDuration="7.837312661s" podCreationTimestamp="2026-03-20 13:39:33 +0000 UTC" firstStartedPulling="2026-03-20 13:39:37.790385584 +0000 UTC m=+1024.079985567" lastFinishedPulling="2026-03-20 13:39:40.215135638 +0000 UTC m=+1026.504735621" observedRunningTime="2026-03-20 13:39:40.835629373 +0000 UTC m=+1027.125229336" watchObservedRunningTime="2026-03-20 13:39:40.837312661 +0000 UTC m=+1027.126912604" Mar 20 13:39:42 crc kubenswrapper[4690]: I0320 13:39:42.713586 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-init-59b5998766-5npwc" Mar 20 13:39:44 crc kubenswrapper[4690]: I0320 13:39:44.154123 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:44 crc kubenswrapper[4690]: I0320 13:39:44.155731 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:44 crc kubenswrapper[4690]: I0320 13:39:44.196450 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:45 crc kubenswrapper[4690]: I0320 13:39:45.924516 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:47 crc kubenswrapper[4690]: I0320 13:39:47.609605 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-chjsr"] Mar 20 13:39:47 crc kubenswrapper[4690]: I0320 13:39:47.861471 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-chjsr" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerName="registry-server" containerID="cri-o://8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8" gracePeriod=2 Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.280402 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.422981 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-utilities\") pod \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.423387 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-catalog-content\") pod \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.423433 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lqkzp\" (UniqueName: \"kubernetes.io/projected/7f2e880a-9059-4659-9ba2-9b8990dc8f76-kube-api-access-lqkzp\") pod \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\" (UID: \"7f2e880a-9059-4659-9ba2-9b8990dc8f76\") " Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.424714 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-utilities" (OuterVolumeSpecName: "utilities") pod "7f2e880a-9059-4659-9ba2-9b8990dc8f76" (UID: "7f2e880a-9059-4659-9ba2-9b8990dc8f76"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.428478 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7f2e880a-9059-4659-9ba2-9b8990dc8f76-kube-api-access-lqkzp" (OuterVolumeSpecName: "kube-api-access-lqkzp") pod "7f2e880a-9059-4659-9ba2-9b8990dc8f76" (UID: "7f2e880a-9059-4659-9ba2-9b8990dc8f76"). InnerVolumeSpecName "kube-api-access-lqkzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.482150 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7f2e880a-9059-4659-9ba2-9b8990dc8f76" (UID: "7f2e880a-9059-4659-9ba2-9b8990dc8f76"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.525052 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.525952 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f2e880a-9059-4659-9ba2-9b8990dc8f76-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.526000 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lqkzp\" (UniqueName: \"kubernetes.io/projected/7f2e880a-9059-4659-9ba2-9b8990dc8f76-kube-api-access-lqkzp\") on node \"crc\" DevicePath \"\"" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.870134 4690 generic.go:334] "Generic (PLEG): container finished" podID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerID="8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8" exitCode=0 Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.870195 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chjsr" event={"ID":"7f2e880a-9059-4659-9ba2-9b8990dc8f76","Type":"ContainerDied","Data":"8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8"} Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.870238 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-chjsr" event={"ID":"7f2e880a-9059-4659-9ba2-9b8990dc8f76","Type":"ContainerDied","Data":"fbcc85b7eb413a49b660f070f489e378c0a803c343e130bd20035ad2d75487d7"} Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.870238 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-chjsr" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.870266 4690 scope.go:117] "RemoveContainer" containerID="8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.898986 4690 scope.go:117] "RemoveContainer" containerID="e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.907146 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-chjsr"] Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.914153 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-chjsr"] Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.924124 4690 scope.go:117] "RemoveContainer" containerID="28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.939238 4690 scope.go:117] "RemoveContainer" containerID="8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8" Mar 20 13:39:48 crc kubenswrapper[4690]: E0320 13:39:48.939590 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8\": container with ID starting with 8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8 not found: ID does not exist" containerID="8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.939623 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8"} err="failed to get container status \"8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8\": rpc error: code = NotFound desc = could not find container \"8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8\": container with ID starting with 8f9999749ac14756433752b6d4f4e4aacf5f28b0c05e0214f0195c76709f61b8 not found: ID does not exist" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.939649 4690 scope.go:117] "RemoveContainer" containerID="e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53" Mar 20 13:39:48 crc kubenswrapper[4690]: E0320 13:39:48.939975 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53\": container with ID starting with e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53 not found: ID does not exist" containerID="e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.940007 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53"} err="failed to get container status \"e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53\": rpc error: code = NotFound desc = could not find container \"e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53\": container with ID starting with e42a5c27dc75d6ccbb71bea1b8d4b3937266fe70dc4ad8294de6557687d4ad53 not found: ID does not exist" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.940024 4690 scope.go:117] "RemoveContainer" 
containerID="28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d" Mar 20 13:39:48 crc kubenswrapper[4690]: E0320 13:39:48.940297 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d\": container with ID starting with 28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d not found: ID does not exist" containerID="28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d" Mar 20 13:39:48 crc kubenswrapper[4690]: I0320 13:39:48.940314 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d"} err="failed to get container status \"28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d\": rpc error: code = NotFound desc = could not find container \"28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d\": container with ID starting with 28d61321f889f4289c837db992da958f3e8e9b67afd0ee9ba3c3af636c43e12d not found: ID does not exist" Mar 20 13:39:50 crc kubenswrapper[4690]: I0320 13:39:50.424068 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" path="/var/lib/kubelet/pods/7f2e880a-9059-4659-9ba2-9b8990dc8f76/volumes" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.143688 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd"] Mar 20 13:40:00 crc kubenswrapper[4690]: E0320 13:40:00.144810 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerName="registry-server" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.144826 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerName="registry-server" Mar 20 13:40:00 crc kubenswrapper[4690]: E0320 13:40:00.144865 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerName="extract-content" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.144873 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerName="extract-content" Mar 20 13:40:00 crc kubenswrapper[4690]: E0320 13:40:00.144884 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerName="extract-utilities" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.144892 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerName="extract-utilities" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.145033 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="7f2e880a-9059-4659-9ba2-9b8990dc8f76" containerName="registry-server" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.145533 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.148297 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-hdkdh" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.148822 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566900-xlnh4"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.152297 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.159090 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.161179 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.163795 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.164466 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.171954 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566900-xlnh4"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.179980 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.180648 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.182948 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-vlmxs" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.198206 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.206376 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.207331 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.209752 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-xz9hh" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.242915 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.266916 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.267926 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.272176 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-clc7w" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.286175 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.315644 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.316137 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9prw\" (UniqueName: \"kubernetes.io/projected/f4df792a-6016-407b-8ff0-338ab8db08f7-kube-api-access-q9prw\") pod \"barbican-operator-controller-manager-59bc569d95-6jwcd\" (UID: \"f4df792a-6016-407b-8ff0-338ab8db08f7\") " pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.316185 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nf2ss\" (UniqueName: \"kubernetes.io/projected/92fadff7-ff1d-474b-8043-c0fb44d2e635-kube-api-access-nf2ss\") pod \"auto-csr-approver-29566900-xlnh4\" (UID: \"92fadff7-ff1d-474b-8043-c0fb44d2e635\") " pod="openshift-infra/auto-csr-approver-29566900-xlnh4" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.316242 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b75c\" (UniqueName: \"kubernetes.io/projected/509b5616-903e-4638-bcac-7db706a605fb-kube-api-access-8b75c\") pod \"cinder-operator-controller-manager-8d58dc466-kq824\" (UID: \"509b5616-903e-4638-bcac-7db706a605fb\") " pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.333350 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-g6tct" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.335393 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.365734 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.377891 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.391611 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-lqvrv" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.413616 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.467083 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b75c\" (UniqueName: \"kubernetes.io/projected/509b5616-903e-4638-bcac-7db706a605fb-kube-api-access-8b75c\") pod \"cinder-operator-controller-manager-8d58dc466-kq824\" (UID: \"509b5616-903e-4638-bcac-7db706a605fb\") " pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.467127 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6kh6\" (UniqueName: \"kubernetes.io/projected/67bfb5a2-f27a-48d1-829d-67c998495611-kube-api-access-g6kh6\") pod \"horizon-operator-controller-manager-8464cc45fb-sbjcq\" (UID: \"67bfb5a2-f27a-48d1-829d-67c998495611\") " pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.467169 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcg5x\" (UniqueName: \"kubernetes.io/projected/a602a20e-98c2-4eef-8a20-a873a5f04b56-kube-api-access-jcg5x\") pod \"heat-operator-controller-manager-67dd5f86f5-88wmc\" (UID: \"a602a20e-98c2-4eef-8a20-a873a5f04b56\") " pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.467217 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tflwr\" (UniqueName: \"kubernetes.io/projected/af375454-db79-4671-9be9-14e7b5927452-kube-api-access-tflwr\") pod \"designate-operator-controller-manager-588d4d986b-dls6p\" (UID: \"af375454-db79-4671-9be9-14e7b5927452\") " pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.467258 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5ct7\" (UniqueName: \"kubernetes.io/projected/11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6-kube-api-access-j5ct7\") pod \"glance-operator-controller-manager-79df6bcc97-dws7r\" (UID: \"11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6\") " pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.467316 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9prw\" (UniqueName: \"kubernetes.io/projected/f4df792a-6016-407b-8ff0-338ab8db08f7-kube-api-access-q9prw\") pod \"barbican-operator-controller-manager-59bc569d95-6jwcd\" (UID: \"f4df792a-6016-407b-8ff0-338ab8db08f7\") " pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.467336 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nf2ss\" (UniqueName: 
\"kubernetes.io/projected/92fadff7-ff1d-474b-8043-c0fb44d2e635-kube-api-access-nf2ss\") pod \"auto-csr-approver-29566900-xlnh4\" (UID: \"92fadff7-ff1d-474b-8043-c0fb44d2e635\") " pod="openshift-infra/auto-csr-approver-29566900-xlnh4" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.470313 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.483753 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.484930 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.511016 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-jcjw4" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.511586 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.512187 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.536577 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nf2ss\" (UniqueName: \"kubernetes.io/projected/92fadff7-ff1d-474b-8043-c0fb44d2e635-kube-api-access-nf2ss\") pod \"auto-csr-approver-29566900-xlnh4\" (UID: \"92fadff7-ff1d-474b-8043-c0fb44d2e635\") " pod="openshift-infra/auto-csr-approver-29566900-xlnh4" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.543939 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.544390 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b75c\" (UniqueName: \"kubernetes.io/projected/509b5616-903e-4638-bcac-7db706a605fb-kube-api-access-8b75c\") pod \"cinder-operator-controller-manager-8d58dc466-kq824\" (UID: \"509b5616-903e-4638-bcac-7db706a605fb\") " pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.545321 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.548493 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9prw\" (UniqueName: \"kubernetes.io/projected/f4df792a-6016-407b-8ff0-338ab8db08f7-kube-api-access-q9prw\") pod \"barbican-operator-controller-manager-59bc569d95-6jwcd\" (UID: \"f4df792a-6016-407b-8ff0-338ab8db08f7\") " pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.558103 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.561263 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.571380 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6kh6\" (UniqueName: \"kubernetes.io/projected/67bfb5a2-f27a-48d1-829d-67c998495611-kube-api-access-g6kh6\") pod \"horizon-operator-controller-manager-8464cc45fb-sbjcq\" (UID: \"67bfb5a2-f27a-48d1-829d-67c998495611\") " pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.571440 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcg5x\" (UniqueName: \"kubernetes.io/projected/a602a20e-98c2-4eef-8a20-a873a5f04b56-kube-api-access-jcg5x\") pod \"heat-operator-controller-manager-67dd5f86f5-88wmc\" (UID: \"a602a20e-98c2-4eef-8a20-a873a5f04b56\") " pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.571487 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tflwr\" (UniqueName: \"kubernetes.io/projected/af375454-db79-4671-9be9-14e7b5927452-kube-api-access-tflwr\") pod \"designate-operator-controller-manager-588d4d986b-dls6p\" (UID: \"af375454-db79-4671-9be9-14e7b5927452\") " pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.571527 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5ct7\" (UniqueName: \"kubernetes.io/projected/11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6-kube-api-access-j5ct7\") pod \"glance-operator-controller-manager-79df6bcc97-dws7r\" (UID: \"11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6\") " pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.573982 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-4rjrq" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.577108 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-lfjd7" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.588122 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.600986 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tflwr\" (UniqueName: \"kubernetes.io/projected/af375454-db79-4671-9be9-14e7b5927452-kube-api-access-tflwr\") pod \"designate-operator-controller-manager-588d4d986b-dls6p\" (UID: \"af375454-db79-4671-9be9-14e7b5927452\") " pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.601542 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.601614 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.602740 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.614174 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.616011 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcg5x\" (UniqueName: \"kubernetes.io/projected/a602a20e-98c2-4eef-8a20-a873a5f04b56-kube-api-access-jcg5x\") pod \"heat-operator-controller-manager-67dd5f86f5-88wmc\" (UID: \"a602a20e-98c2-4eef-8a20-a873a5f04b56\") " pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.617692 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-lld79" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.618023 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6kh6\" (UniqueName: \"kubernetes.io/projected/67bfb5a2-f27a-48d1-829d-67c998495611-kube-api-access-g6kh6\") pod \"horizon-operator-controller-manager-8464cc45fb-sbjcq\" (UID: \"67bfb5a2-f27a-48d1-829d-67c998495611\") " pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.630929 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.631766 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5ct7\" (UniqueName: \"kubernetes.io/projected/11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6-kube-api-access-j5ct7\") pod \"glance-operator-controller-manager-79df6bcc97-dws7r\" (UID: \"11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6\") " pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.631919 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.650326 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-xvmrs" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.665205 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.672591 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.672635 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qvmkn\" (UniqueName: \"kubernetes.io/projected/814ca78b-98a4-4e08-8c17-2ac1e45f3f70-kube-api-access-qvmkn\") pod \"ironic-operator-controller-manager-6f787dddc9-rxsjr\" (UID: \"814ca78b-98a4-4e08-8c17-2ac1e45f3f70\") " pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.672666 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8ckt\" (UniqueName: \"kubernetes.io/projected/54581816-9413-47c6-889c-1ae815299b20-kube-api-access-s8ckt\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.672730 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v5t95\" (UniqueName: \"kubernetes.io/projected/e6edfe21-12c6-4ca3-9992-c47b65455a25-kube-api-access-v5t95\") pod \"keystone-operator-controller-manager-768b96df4c-jfmgm\" (UID: \"e6edfe21-12c6-4ca3-9992-c47b65455a25\") " pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.706363 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.707337 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.718494 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-5kdzb" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.718799 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.736922 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.750085 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.769940 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.771272 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.776695 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ngwq2\" (UniqueName: \"kubernetes.io/projected/d30dfb8b-246a-461f-8230-4e12b67f8475-kube-api-access-ngwq2\") pod \"manila-operator-controller-manager-55f864c847-f2cbx\" (UID: \"d30dfb8b-246a-461f-8230-4e12b67f8475\") " pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.776763 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8ckt\" (UniqueName: \"kubernetes.io/projected/54581816-9413-47c6-889c-1ae815299b20-kube-api-access-s8ckt\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.776823 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v5t95\" (UniqueName: \"kubernetes.io/projected/e6edfe21-12c6-4ca3-9992-c47b65455a25-kube-api-access-v5t95\") pod \"keystone-operator-controller-manager-768b96df4c-jfmgm\" (UID: \"e6edfe21-12c6-4ca3-9992-c47b65455a25\") " pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.776860 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjqkz\" (UniqueName: \"kubernetes.io/projected/51eaf314-a93e-4736-b4ec-7de18291b971-kube-api-access-rjqkz\") pod \"nova-operator-controller-manager-5d488d59fb-54czp\" (UID: \"51eaf314-a93e-4736-b4ec-7de18291b971\") " pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.776883 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hkpn\" (UniqueName: \"kubernetes.io/projected/870fb6a4-04ea-4c9a-ae30-4acb7e4a050c-kube-api-access-8hkpn\") pod \"mariadb-operator-controller-manager-67ccfc9778-f5pph\" (UID: \"870fb6a4-04ea-4c9a-ae30-4acb7e4a050c\") " pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.776913 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.776936 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmgb4\" (UniqueName: \"kubernetes.io/projected/0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5-kube-api-access-xmgb4\") pod \"neutron-operator-controller-manager-767865f676-pt8zd\" (UID: \"0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5\") " pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.776957 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qvmkn\" (UniqueName: 
\"kubernetes.io/projected/814ca78b-98a4-4e08-8c17-2ac1e45f3f70-kube-api-access-qvmkn\") pod \"ironic-operator-controller-manager-6f787dddc9-rxsjr\" (UID: \"814ca78b-98a4-4e08-8c17-2ac1e45f3f70\") " pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" Mar 20 13:40:00 crc kubenswrapper[4690]: E0320 13:40:00.777288 4690 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:00 crc kubenswrapper[4690]: E0320 13:40:00.777331 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert podName:54581816-9413-47c6-889c-1ae815299b20 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:01.27731481 +0000 UTC m=+1047.566914753 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert") pod "infra-operator-controller-manager-669fff9c7c-qqp5b" (UID: "54581816-9413-47c6-889c-1ae815299b20") : secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.780232 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.790122 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.792995 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-bnt5t" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.802028 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.809887 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v5t95\" (UniqueName: \"kubernetes.io/projected/e6edfe21-12c6-4ca3-9992-c47b65455a25-kube-api-access-v5t95\") pod \"keystone-operator-controller-manager-768b96df4c-jfmgm\" (UID: \"e6edfe21-12c6-4ca3-9992-c47b65455a25\") " pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.811182 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qvmkn\" (UniqueName: \"kubernetes.io/projected/814ca78b-98a4-4e08-8c17-2ac1e45f3f70-kube-api-access-qvmkn\") pod \"ironic-operator-controller-manager-6f787dddc9-rxsjr\" (UID: \"814ca78b-98a4-4e08-8c17-2ac1e45f3f70\") " pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.811420 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8ckt\" (UniqueName: \"kubernetes.io/projected/54581816-9413-47c6-889c-1ae815299b20-kube-api-access-s8ckt\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.833282 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.864911 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.867761 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.868485 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.872098 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-zcvfz" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.875588 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.877448 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjqkz\" (UniqueName: \"kubernetes.io/projected/51eaf314-a93e-4736-b4ec-7de18291b971-kube-api-access-rjqkz\") pod \"nova-operator-controller-manager-5d488d59fb-54czp\" (UID: \"51eaf314-a93e-4736-b4ec-7de18291b971\") " pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.877481 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44vhr\" (UniqueName: \"kubernetes.io/projected/15a47805-6294-4f70-ba77-d22857c579b9-kube-api-access-44vhr\") pod \"octavia-operator-controller-manager-5b9f45d989-x5l86\" (UID: \"15a47805-6294-4f70-ba77-d22857c579b9\") " pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.877505 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hkpn\" (UniqueName: \"kubernetes.io/projected/870fb6a4-04ea-4c9a-ae30-4acb7e4a050c-kube-api-access-8hkpn\") pod \"mariadb-operator-controller-manager-67ccfc9778-f5pph\" (UID: \"870fb6a4-04ea-4c9a-ae30-4acb7e4a050c\") " pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.877550 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmgb4\" (UniqueName: \"kubernetes.io/projected/0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5-kube-api-access-xmgb4\") pod \"neutron-operator-controller-manager-767865f676-pt8zd\" (UID: \"0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5\") " pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.877571 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ngwq2\" (UniqueName: \"kubernetes.io/projected/d30dfb8b-246a-461f-8230-4e12b67f8475-kube-api-access-ngwq2\") pod \"manila-operator-controller-manager-55f864c847-f2cbx\" (UID: \"d30dfb8b-246a-461f-8230-4e12b67f8475\") " pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.889206 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.890403 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.891407 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.893437 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-m5gdj" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.898119 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.917898 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.918692 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.922754 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.938331 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-97ks5" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.938648 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hkpn\" (UniqueName: \"kubernetes.io/projected/870fb6a4-04ea-4c9a-ae30-4acb7e4a050c-kube-api-access-8hkpn\") pod \"mariadb-operator-controller-manager-67ccfc9778-f5pph\" (UID: \"870fb6a4-04ea-4c9a-ae30-4acb7e4a050c\") " pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.941338 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjqkz\" (UniqueName: \"kubernetes.io/projected/51eaf314-a93e-4736-b4ec-7de18291b971-kube-api-access-rjqkz\") pod \"nova-operator-controller-manager-5d488d59fb-54czp\" (UID: \"51eaf314-a93e-4736-b4ec-7de18291b971\") " pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.945435 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmgb4\" (UniqueName: \"kubernetes.io/projected/0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5-kube-api-access-xmgb4\") pod \"neutron-operator-controller-manager-767865f676-pt8zd\" (UID: \"0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5\") " pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.946525 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.948318 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.952218 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-qdrd9" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.952625 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.956290 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.956388 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ngwq2\" (UniqueName: \"kubernetes.io/projected/d30dfb8b-246a-461f-8230-4e12b67f8475-kube-api-access-ngwq2\") pod \"manila-operator-controller-manager-55f864c847-f2cbx\" (UID: \"d30dfb8b-246a-461f-8230-4e12b67f8475\") " pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.976911 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z"] Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.983302 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.984539 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44vhr\" (UniqueName: \"kubernetes.io/projected/15a47805-6294-4f70-ba77-d22857c579b9-kube-api-access-44vhr\") pod \"octavia-operator-controller-manager-5b9f45d989-x5l86\" (UID: \"15a47805-6294-4f70-ba77-d22857c579b9\") " pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.984578 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92hfz\" (UniqueName: \"kubernetes.io/projected/775748cf-df5a-466b-9c3f-057ca3ed36ab-kube-api-access-92hfz\") pod \"placement-operator-controller-manager-5784578c99-hhg2z\" (UID: \"775748cf-df5a-466b-9c3f-057ca3ed36ab\") " pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" Mar 20 13:40:00 crc kubenswrapper[4690]: I0320 13:40:00.998257 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-c674c5965-dktfp"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.000157 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.004638 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-lrj6j" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.008061 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.008660 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.009561 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.011390 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-s4zhx" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.014720 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44vhr\" (UniqueName: \"kubernetes.io/projected/15a47805-6294-4f70-ba77-d22857c579b9-kube-api-access-44vhr\") pod \"octavia-operator-controller-manager-5b9f45d989-x5l86\" (UID: \"15a47805-6294-4f70-ba77-d22857c579b9\") " pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.025697 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.042327 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.043608 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.044539 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.045935 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-lxvxr" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.075238 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-c674c5965-dktfp"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.086484 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbrbc\" (UniqueName: \"kubernetes.io/projected/f13f1ec1-31f1-4492-876d-42ad21a46373-kube-api-access-tbrbc\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.086531 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs62d\" (UniqueName: \"kubernetes.io/projected/be5cbf2c-ee29-4c1a-9eca-50c8069886fd-kube-api-access-rs62d\") pod \"swift-operator-controller-manager-c674c5965-dktfp\" (UID: \"be5cbf2c-ee29-4c1a-9eca-50c8069886fd\") " pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.086587 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5jrt\" (UniqueName: \"kubernetes.io/projected/49cc1968-cbc1-432a-a952-dec062db3bd5-kube-api-access-g5jrt\") pod \"telemetry-operator-controller-manager-d6b694c5-9bx66\" (UID: \"49cc1968-cbc1-432a-a952-dec062db3bd5\") " pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.086611 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92hfz\" (UniqueName: \"kubernetes.io/projected/775748cf-df5a-466b-9c3f-057ca3ed36ab-kube-api-access-92hfz\") pod \"placement-operator-controller-manager-5784578c99-hhg2z\" (UID: \"775748cf-df5a-466b-9c3f-057ca3ed36ab\") " pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.086639 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.086684 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c42j8\" (UniqueName: \"kubernetes.io/projected/aa8d9ef3-cf4c-4083-bb69-579193795ffb-kube-api-access-c42j8\") pod \"test-operator-controller-manager-5c5cb9c4d7-gvbnp\" (UID: \"aa8d9ef3-cf4c-4083-bb69-579193795ffb\") " pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.086732 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-w4p9g\" (UniqueName: \"kubernetes.io/projected/ace63477-4c50-499f-958c-11135ab6a1a2-kube-api-access-w4p9g\") pod \"ovn-operator-controller-manager-884679f54-5d2n4\" (UID: \"ace63477-4c50-499f-958c-11135ab6a1a2\") " pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.098263 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.099243 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.103231 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-q266q" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.107286 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.112649 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92hfz\" (UniqueName: \"kubernetes.io/projected/775748cf-df5a-466b-9c3f-057ca3ed36ab-kube-api-access-92hfz\") pod \"placement-operator-controller-manager-5784578c99-hhg2z\" (UID: \"775748cf-df5a-466b-9c3f-057ca3ed36ab\") " pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.115435 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.120130 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.183580 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.189424 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w4p9g\" (UniqueName: \"kubernetes.io/projected/ace63477-4c50-499f-958c-11135ab6a1a2-kube-api-access-w4p9g\") pod \"ovn-operator-controller-manager-884679f54-5d2n4\" (UID: \"ace63477-4c50-499f-958c-11135ab6a1a2\") " pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.189480 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbrbc\" (UniqueName: \"kubernetes.io/projected/f13f1ec1-31f1-4492-876d-42ad21a46373-kube-api-access-tbrbc\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.189514 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs62d\" (UniqueName: \"kubernetes.io/projected/be5cbf2c-ee29-4c1a-9eca-50c8069886fd-kube-api-access-rs62d\") pod \"swift-operator-controller-manager-c674c5965-dktfp\" (UID: \"be5cbf2c-ee29-4c1a-9eca-50c8069886fd\") " 
pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.189541 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5jrt\" (UniqueName: \"kubernetes.io/projected/49cc1968-cbc1-432a-a952-dec062db3bd5-kube-api-access-g5jrt\") pod \"telemetry-operator-controller-manager-d6b694c5-9bx66\" (UID: \"49cc1968-cbc1-432a-a952-dec062db3bd5\") " pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.189564 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.189603 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c42j8\" (UniqueName: \"kubernetes.io/projected/aa8d9ef3-cf4c-4083-bb69-579193795ffb-kube-api-access-c42j8\") pod \"test-operator-controller-manager-5c5cb9c4d7-gvbnp\" (UID: \"aa8d9ef3-cf4c-4083-bb69-579193795ffb\") " pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.191053 4690 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.191096 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert podName:f13f1ec1-31f1-4492-876d-42ad21a46373 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:01.691082857 +0000 UTC m=+1047.980682800 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert") pod "openstack-baremetal-operator-controller-manager-86657c54f5bfk29" (UID: "f13f1ec1-31f1-4492-876d-42ad21a46373") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.221875 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w4p9g\" (UniqueName: \"kubernetes.io/projected/ace63477-4c50-499f-958c-11135ab6a1a2-kube-api-access-w4p9g\") pod \"ovn-operator-controller-manager-884679f54-5d2n4\" (UID: \"ace63477-4c50-499f-958c-11135ab6a1a2\") " pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.222355 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c42j8\" (UniqueName: \"kubernetes.io/projected/aa8d9ef3-cf4c-4083-bb69-579193795ffb-kube-api-access-c42j8\") pod \"test-operator-controller-manager-5c5cb9c4d7-gvbnp\" (UID: \"aa8d9ef3-cf4c-4083-bb69-579193795ffb\") " pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.222722 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5jrt\" (UniqueName: \"kubernetes.io/projected/49cc1968-cbc1-432a-a952-dec062db3bd5-kube-api-access-g5jrt\") pod \"telemetry-operator-controller-manager-d6b694c5-9bx66\" (UID: \"49cc1968-cbc1-432a-a952-dec062db3bd5\") " pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.224288 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.225440 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbrbc\" (UniqueName: \"kubernetes.io/projected/f13f1ec1-31f1-4492-876d-42ad21a46373-kube-api-access-tbrbc\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.225637 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.227023 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs62d\" (UniqueName: \"kubernetes.io/projected/be5cbf2c-ee29-4c1a-9eca-50c8069886fd-kube-api-access-rs62d\") pod \"swift-operator-controller-manager-c674c5965-dktfp\" (UID: \"be5cbf2c-ee29-4c1a-9eca-50c8069886fd\") " pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.235521 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.235758 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.235995 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-7v99r" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.264701 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.290505 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.290691 4690 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.290760 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert podName:54581816-9413-47c6-889c-1ae815299b20 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:02.290740031 +0000 UTC m=+1048.580339974 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert") pod "infra-operator-controller-manager-669fff9c7c-qqp5b" (UID: "54581816-9413-47c6-889c-1ae815299b20") : secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.290919 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crwtn\" (UniqueName: \"kubernetes.io/projected/a0d899fb-25be-4485-9d4d-77ca047c1524-kube-api-access-crwtn\") pod \"watcher-operator-controller-manager-6c4d75f7f9-pfj42\" (UID: \"a0d899fb-25be-4485-9d4d-77ca047c1524\") " pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.312376 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.351237 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.370097 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.394637 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crwtn\" (UniqueName: \"kubernetes.io/projected/a0d899fb-25be-4485-9d4d-77ca047c1524-kube-api-access-crwtn\") pod \"watcher-operator-controller-manager-6c4d75f7f9-pfj42\" (UID: \"a0d899fb-25be-4485-9d4d-77ca047c1524\") " pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.394719 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.394772 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfn64\" (UniqueName: \"kubernetes.io/projected/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-kube-api-access-vfn64\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.394835 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.395744 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc"] Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.413609 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crwtn\" (UniqueName: \"kubernetes.io/projected/a0d899fb-25be-4485-9d4d-77ca047c1524-kube-api-access-crwtn\") pod \"watcher-operator-controller-manager-6c4d75f7f9-pfj42\" (UID: \"a0d899fb-25be-4485-9d4d-77ca047c1524\") " pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.420359 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.442952 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.495442 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.495744 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.495795 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfn64\" (UniqueName: \"kubernetes.io/projected/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-kube-api-access-vfn64\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.495840 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.496022 4690 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.496073 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:01.99605619 +0000 UTC m=+1048.285656133 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "webhook-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.496079 4690 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.496121 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:01.996105341 +0000 UTC m=+1048.285705274 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "metrics-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.517728 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfn64\" (UniqueName: \"kubernetes.io/projected/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-kube-api-access-vfn64\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.521496 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.700252 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.700713 4690 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: E0320 13:40:01.700765 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert podName:f13f1ec1-31f1-4492-876d-42ad21a46373 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:02.700750801 +0000 UTC m=+1048.990350744 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert") pod "openstack-baremetal-operator-controller-manager-86657c54f5bfk29" (UID: "f13f1ec1-31f1-4492-876d-42ad21a46373") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.711882 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq"] Mar 20 13:40:01 crc kubenswrapper[4690]: W0320 13:40:01.723554 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67bfb5a2_f27a_48d1_829d_67c998495611.slice/crio-53c7a56d0a72a922ff2fc9f453415574da05602df5c4b80e0e20e13e0dbff92d WatchSource:0}: Error finding container 53c7a56d0a72a922ff2fc9f453415574da05602df5c4b80e0e20e13e0dbff92d: Status 404 returned error can't find the container with id 53c7a56d0a72a922ff2fc9f453415574da05602df5c4b80e0e20e13e0dbff92d Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.951504 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" event={"ID":"67bfb5a2-f27a-48d1-829d-67c998495611","Type":"ContainerStarted","Data":"53c7a56d0a72a922ff2fc9f453415574da05602df5c4b80e0e20e13e0dbff92d"} Mar 20 13:40:01 crc kubenswrapper[4690]: I0320 13:40:01.953910 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" event={"ID":"a602a20e-98c2-4eef-8a20-a873a5f04b56","Type":"ContainerStarted","Data":"d0c4f68154c7cf88f2ba8e94f6eca1744a8ae9c3cb5df4ed4222e5b3861ea709"} Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.008715 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.009022 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.009098 4690 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.009142 4690 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.009170 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:03.009152561 +0000 UTC m=+1049.298752504 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "webhook-server-cert" not found Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.009193 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:03.009178871 +0000 UTC m=+1049.298778814 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "metrics-server-cert" not found Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.047804 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.054136 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.060242 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.084494 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr"] Mar 20 13:40:02 crc kubenswrapper[4690]: W0320 13:40:02.099165 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod509b5616_903e_4638_bcac_7db706a605fb.slice/crio-576c2c460aacdf94ff4dfa44fc2a3bda7997df3556bef3ece918e2d0de902168 WatchSource:0}: Error finding container 576c2c460aacdf94ff4dfa44fc2a3bda7997df3556bef3ece918e2d0de902168: Status 404 returned error can't find the container with id 576c2c460aacdf94ff4dfa44fc2a3bda7997df3556bef3ece918e2d0de902168 Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.118132 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.133234 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.136373 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.142404 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.147094 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.173198 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566900-xlnh4"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.178104 4690 kubelet.go:2428] "SyncLoop 
UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp"] Mar 20 13:40:02 crc kubenswrapper[4690]: W0320 13:40:02.178713 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92fadff7_ff1d_474b_8043_c0fb44d2e635.slice/crio-359a73afedd22b757ad22052dc1a6c18c1ac850c87fa95ce8be863b47d039d4f WatchSource:0}: Error finding container 359a73afedd22b757ad22052dc1a6c18c1ac850c87fa95ce8be863b47d039d4f: Status 404 returned error can't find the container with id 359a73afedd22b757ad22052dc1a6c18c1ac850c87fa95ce8be863b47d039d4f Mar 20 13:40:02 crc kubenswrapper[4690]: W0320 13:40:02.181882 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51eaf314_a93e_4736_b4ec_7de18291b971.slice/crio-e2a0b6fff306ff48d785c5521fa8e0013c32bc1a8e096ffd77d72701a4900871 WatchSource:0}: Error finding container e2a0b6fff306ff48d785c5521fa8e0013c32bc1a8e096ffd77d72701a4900871: Status 404 returned error can't find the container with id e2a0b6fff306ff48d785c5521fa8e0013c32bc1a8e096ffd77d72701a4900871 Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.187723 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:7398eb8fa5a4844d3326a5dff759d17199870c389b3ce3011a038b27bf95512a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-rjqkz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in 
pod nova-operator-controller-manager-5d488d59fb-54czp_openstack-operators(51eaf314-a93e-4736-b4ec-7de18291b971): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.189033 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" podUID="51eaf314-a93e-4736-b4ec-7de18291b971" Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.315818 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.316046 4690 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.316101 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert podName:54581816-9413-47c6-889c-1ae815299b20 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:04.316083969 +0000 UTC m=+1050.605683912 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert") pod "infra-operator-controller-manager-669fff9c7c-qqp5b" (UID: "54581816-9413-47c6-889c-1ae815299b20") : secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.409718 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-c674c5965-dktfp"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.428613 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z"] Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.428661 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66"] Mar 20 13:40:02 crc kubenswrapper[4690]: W0320 13:40:02.432954 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod775748cf_df5a_466b_9c3f_057ca3ed36ab.slice/crio-110f4cf4ce295171229a27ed03d1a4624e7d7a292ace33eeafbc2344f5682d19 WatchSource:0}: Error finding container 110f4cf4ce295171229a27ed03d1a4624e7d7a292ace33eeafbc2344f5682d19: Status 404 returned error can't find the container with id 110f4cf4ce295171229a27ed03d1a4624e7d7a292ace33eeafbc2344f5682d19 Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.437600 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp"] Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.442573 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:c8743a6661d118b0e5ba3eb110643358a8a3237dc75984a8f9829880b55a1622,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 
--metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-92hfz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-5784578c99-hhg2z_openstack-operators(775748cf-df5a-466b-9c3f-057ca3ed36ab): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.446353 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4"] Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.446383 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" podUID="775748cf-df5a-466b-9c3f-057ca3ed36ab" Mar 20 13:40:02 crc kubenswrapper[4690]: W0320 13:40:02.449818 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podace63477_4c50_499f_958c_11135ab6a1a2.slice/crio-415d2fd7ed1c72aac7023609577a18e98c3e7cc75dd5a7cc71fc39cac6e4706c WatchSource:0}: Error finding container 415d2fd7ed1c72aac7023609577a18e98c3e7cc75dd5a7cc71fc39cac6e4706c: Status 404 returned error can't find the container with id 415d2fd7ed1c72aac7023609577a18e98c3e7cc75dd5a7cc71fc39cac6e4706c Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.457637 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86"] Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.459970 4690 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:bef93f71d3b42a72d8b96c69bdb4db4b8bd797c5093a0a719443d7a5c9aaab55,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-w4p9g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-884679f54-5d2n4_openstack-operators(ace63477-4c50-499f-958c-11135ab6a1a2): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.460248 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:c500fa7080b94105e85eeced772d8872e4168904e74ba02116e15ab66f522444,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-g5jrt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-d6b694c5-9bx66_openstack-operators(49cc1968-cbc1-432a-a952-dec062db3bd5): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.461900 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" podUID="49cc1968-cbc1-432a-a952-dec062db3bd5" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.461944 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" podUID="ace63477-4c50-499f-958c-11135ab6a1a2" Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.463772 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42"] Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.472589 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:425fd66675becbe0ca2b2fe1a5a6694ac6e0b1cdce9a77a7a37f99785eadc74a,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-44vhr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-5b9f45d989-x5l86_openstack-operators(15a47805-6294-4f70-ba77-d22857c579b9): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.473518 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:d9c55e8c6304a0e32289b5e8c69a87ea59b9968918a5c85b7c384633df82c807,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-crwtn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-6c4d75f7f9-pfj42_openstack-operators(a0d899fb-25be-4485-9d4d-77ca047c1524): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.473667 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" podUID="15a47805-6294-4f70-ba77-d22857c579b9" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.475066 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" podUID="a0d899fb-25be-4485-9d4d-77ca047c1524" Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.719715 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.719995 4690 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.720240 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert podName:f13f1ec1-31f1-4492-876d-42ad21a46373 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:04.720223802 +0000 UTC m=+1051.009823745 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert") pod "openstack-baremetal-operator-controller-manager-86657c54f5bfk29" (UID: "f13f1ec1-31f1-4492-876d-42ad21a46373") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.969592 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" event={"ID":"814ca78b-98a4-4e08-8c17-2ac1e45f3f70","Type":"ContainerStarted","Data":"047f81a3ddae928702120d13eaa22dcdbc2d96d6206110d597479158ce1feb79"} Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.972198 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" event={"ID":"e6edfe21-12c6-4ca3-9992-c47b65455a25","Type":"ContainerStarted","Data":"da0cc807a6befc993020ebe2ba3e5de464a10b0bac6f5b77afdacfdb2098db86"} Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.973302 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" event={"ID":"ace63477-4c50-499f-958c-11135ab6a1a2","Type":"ContainerStarted","Data":"415d2fd7ed1c72aac7023609577a18e98c3e7cc75dd5a7cc71fc39cac6e4706c"} Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.976221 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:bef93f71d3b42a72d8b96c69bdb4db4b8bd797c5093a0a719443d7a5c9aaab55\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" podUID="ace63477-4c50-499f-958c-11135ab6a1a2" Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.976245 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" event={"ID":"49cc1968-cbc1-432a-a952-dec062db3bd5","Type":"ContainerStarted","Data":"171468fa10e485deae3803d76e1de7aa62741fe22d04ae4af3e5cc67e3c3fb83"} Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.981782 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:c500fa7080b94105e85eeced772d8872e4168904e74ba02116e15ab66f522444\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" podUID="49cc1968-cbc1-432a-a952-dec062db3bd5" Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.982001 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" event={"ID":"92fadff7-ff1d-474b-8043-c0fb44d2e635","Type":"ContainerStarted","Data":"359a73afedd22b757ad22052dc1a6c18c1ac850c87fa95ce8be863b47d039d4f"} Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.983466 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" event={"ID":"a0d899fb-25be-4485-9d4d-77ca047c1524","Type":"ContainerStarted","Data":"4b98648f9fc571cbdd923f86ff4266fe8ba67b7a828dda897ace9e9feec79f9f"} Mar 20 13:40:02 crc kubenswrapper[4690]: E0320 13:40:02.984522 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:d9c55e8c6304a0e32289b5e8c69a87ea59b9968918a5c85b7c384633df82c807\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" podUID="a0d899fb-25be-4485-9d4d-77ca047c1524" Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.985510 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" event={"ID":"d30dfb8b-246a-461f-8230-4e12b67f8475","Type":"ContainerStarted","Data":"2c038694fbed0a5dcc61397258d4ce6edb287a8d403b5a9e3cbe4119e59a05c2"} Mar 20 13:40:02 crc kubenswrapper[4690]: I0320 13:40:02.990605 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" event={"ID":"f4df792a-6016-407b-8ff0-338ab8db08f7","Type":"ContainerStarted","Data":"0c3ca813c7554f63d98f3bc28df71a5db9d0f562f595bf368c61df45d8d668fd"} Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:02.993958 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" event={"ID":"51eaf314-a93e-4736-b4ec-7de18291b971","Type":"ContainerStarted","Data":"e2a0b6fff306ff48d785c5521fa8e0013c32bc1a8e096ffd77d72701a4900871"} Mar 20 13:40:03 crc kubenswrapper[4690]: E0320 13:40:02.995660 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:7398eb8fa5a4844d3326a5dff759d17199870c389b3ce3011a038b27bf95512a\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" podUID="51eaf314-a93e-4736-b4ec-7de18291b971" Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:02.996556 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" event={"ID":"be5cbf2c-ee29-4c1a-9eca-50c8069886fd","Type":"ContainerStarted","Data":"4191d199e5e939c02bff5f702e4d521ead5a90422954f84359ce6fde3e494954"} Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.006885 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" event={"ID":"15a47805-6294-4f70-ba77-d22857c579b9","Type":"ContainerStarted","Data":"1b692411cfaf023ca96ff82c896c256eedef1634673275a50047e3bca0943115"} Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.009599 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" event={"ID":"509b5616-903e-4638-bcac-7db706a605fb","Type":"ContainerStarted","Data":"576c2c460aacdf94ff4dfa44fc2a3bda7997df3556bef3ece918e2d0de902168"} Mar 20 13:40:03 crc kubenswrapper[4690]: E0320 13:40:03.009815 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:425fd66675becbe0ca2b2fe1a5a6694ac6e0b1cdce9a77a7a37f99785eadc74a\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" podUID="15a47805-6294-4f70-ba77-d22857c579b9" Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.010928 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" 
event={"ID":"775748cf-df5a-466b-9c3f-057ca3ed36ab","Type":"ContainerStarted","Data":"110f4cf4ce295171229a27ed03d1a4624e7d7a292ace33eeafbc2344f5682d19"} Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.012225 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" event={"ID":"af375454-db79-4671-9be9-14e7b5927452","Type":"ContainerStarted","Data":"65a6542c275bc4f6b5917468d4a865dfc23b63fbce8c1d83b08335729ff0080e"} Mar 20 13:40:03 crc kubenswrapper[4690]: E0320 13:40:03.012340 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:c8743a6661d118b0e5ba3eb110643358a8a3237dc75984a8f9829880b55a1622\\\"\"" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" podUID="775748cf-df5a-466b-9c3f-057ca3ed36ab" Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.013938 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" event={"ID":"aa8d9ef3-cf4c-4083-bb69-579193795ffb","Type":"ContainerStarted","Data":"2318fc0505be7e1fab9377449c1077cd0619e72b18e46163cefb03c5447e9355"} Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.015876 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" event={"ID":"870fb6a4-04ea-4c9a-ae30-4acb7e4a050c","Type":"ContainerStarted","Data":"34008e745ae27eb6d4638e5ef6805c905a35c54009327a1e3272243ae80b6a82"} Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.017004 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" event={"ID":"0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5","Type":"ContainerStarted","Data":"ad5d73864266fc6e89ce0c40ef4383f44ea3df63249190f0a631c723595fc728"} Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.020021 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" event={"ID":"11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6","Type":"ContainerStarted","Data":"3cf74e39370742c2e83d25cc5c729faace8b44609aa89a83fe52d256c6f6d555"} Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.022817 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:03 crc kubenswrapper[4690]: I0320 13:40:03.022942 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:03 crc kubenswrapper[4690]: E0320 13:40:03.022985 4690 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 13:40:03 crc kubenswrapper[4690]: E0320 13:40:03.023052 4690 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:05.023018123 +0000 UTC m=+1051.312618066 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "metrics-server-cert" not found Mar 20 13:40:03 crc kubenswrapper[4690]: E0320 13:40:03.023611 4690 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 13:40:03 crc kubenswrapper[4690]: E0320 13:40:03.023911 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:05.023896788 +0000 UTC m=+1051.313496801 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "webhook-server-cert" not found Mar 20 13:40:04 crc kubenswrapper[4690]: I0320 13:40:04.028483 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" event={"ID":"92fadff7-ff1d-474b-8043-c0fb44d2e635","Type":"ContainerStarted","Data":"c7ee29cec07ad217a06541d2db38c4e16b1b44ed4f6201aa4baa7c7d0229eded"} Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.029555 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:bef93f71d3b42a72d8b96c69bdb4db4b8bd797c5093a0a719443d7a5c9aaab55\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" podUID="ace63477-4c50-499f-958c-11135ab6a1a2" Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.030125 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:c500fa7080b94105e85eeced772d8872e4168904e74ba02116e15ab66f522444\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" podUID="49cc1968-cbc1-432a-a952-dec062db3bd5" Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.030982 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:d9c55e8c6304a0e32289b5e8c69a87ea59b9968918a5c85b7c384633df82c807\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" podUID="a0d899fb-25be-4485-9d4d-77ca047c1524" Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.031085 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:c8743a6661d118b0e5ba3eb110643358a8a3237dc75984a8f9829880b55a1622\\\"\"" 
pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" podUID="775748cf-df5a-466b-9c3f-057ca3ed36ab" Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.031430 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:7398eb8fa5a4844d3326a5dff759d17199870c389b3ce3011a038b27bf95512a\\\"\"" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" podUID="51eaf314-a93e-4736-b4ec-7de18291b971" Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.035385 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:425fd66675becbe0ca2b2fe1a5a6694ac6e0b1cdce9a77a7a37f99785eadc74a\\\"\"" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" podUID="15a47805-6294-4f70-ba77-d22857c579b9" Mar 20 13:40:04 crc kubenswrapper[4690]: I0320 13:40:04.068086 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" podStartSLOduration=2.870033623 podStartE2EDuration="4.068065862s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.180658618 +0000 UTC m=+1048.470258561" lastFinishedPulling="2026-03-20 13:40:03.378690857 +0000 UTC m=+1049.668290800" observedRunningTime="2026-03-20 13:40:04.058470479 +0000 UTC m=+1050.348070422" watchObservedRunningTime="2026-03-20 13:40:04.068065862 +0000 UTC m=+1050.357665805" Mar 20 13:40:04 crc kubenswrapper[4690]: I0320 13:40:04.352467 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.352610 4690 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.352658 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert podName:54581816-9413-47c6-889c-1ae815299b20 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:08.352643885 +0000 UTC m=+1054.642243828 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert") pod "infra-operator-controller-manager-669fff9c7c-qqp5b" (UID: "54581816-9413-47c6-889c-1ae815299b20") : secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:04 crc kubenswrapper[4690]: I0320 13:40:04.759824 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.760272 4690 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:04 crc kubenswrapper[4690]: E0320 13:40:04.760316 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert podName:f13f1ec1-31f1-4492-876d-42ad21a46373 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:08.760303008 +0000 UTC m=+1055.049902941 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert") pod "openstack-baremetal-operator-controller-manager-86657c54f5bfk29" (UID: "f13f1ec1-31f1-4492-876d-42ad21a46373") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:05 crc kubenswrapper[4690]: I0320 13:40:05.035900 4690 generic.go:334] "Generic (PLEG): container finished" podID="92fadff7-ff1d-474b-8043-c0fb44d2e635" containerID="c7ee29cec07ad217a06541d2db38c4e16b1b44ed4f6201aa4baa7c7d0229eded" exitCode=0 Mar 20 13:40:05 crc kubenswrapper[4690]: I0320 13:40:05.035957 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" event={"ID":"92fadff7-ff1d-474b-8043-c0fb44d2e635","Type":"ContainerDied","Data":"c7ee29cec07ad217a06541d2db38c4e16b1b44ed4f6201aa4baa7c7d0229eded"} Mar 20 13:40:05 crc kubenswrapper[4690]: I0320 13:40:05.063994 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:05 crc kubenswrapper[4690]: I0320 13:40:05.064104 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:05 crc kubenswrapper[4690]: E0320 13:40:05.064190 4690 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 13:40:05 crc kubenswrapper[4690]: E0320 13:40:05.064243 4690 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 13:40:05 crc kubenswrapper[4690]: E0320 13:40:05.064275 
4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:09.064253501 +0000 UTC m=+1055.353853444 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "metrics-server-cert" not found Mar 20 13:40:05 crc kubenswrapper[4690]: E0320 13:40:05.064305 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:09.064286152 +0000 UTC m=+1055.353886165 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "webhook-server-cert" not found Mar 20 13:40:06 crc kubenswrapper[4690]: I0320 13:40:06.808118 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" Mar 20 13:40:06 crc kubenswrapper[4690]: I0320 13:40:06.887709 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nf2ss\" (UniqueName: \"kubernetes.io/projected/92fadff7-ff1d-474b-8043-c0fb44d2e635-kube-api-access-nf2ss\") pod \"92fadff7-ff1d-474b-8043-c0fb44d2e635\" (UID: \"92fadff7-ff1d-474b-8043-c0fb44d2e635\") " Mar 20 13:40:06 crc kubenswrapper[4690]: I0320 13:40:06.895288 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92fadff7-ff1d-474b-8043-c0fb44d2e635-kube-api-access-nf2ss" (OuterVolumeSpecName: "kube-api-access-nf2ss") pod "92fadff7-ff1d-474b-8043-c0fb44d2e635" (UID: "92fadff7-ff1d-474b-8043-c0fb44d2e635"). InnerVolumeSpecName "kube-api-access-nf2ss". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:40:06 crc kubenswrapper[4690]: I0320 13:40:06.990084 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nf2ss\" (UniqueName: \"kubernetes.io/projected/92fadff7-ff1d-474b-8043-c0fb44d2e635-kube-api-access-nf2ss\") on node \"crc\" DevicePath \"\"" Mar 20 13:40:07 crc kubenswrapper[4690]: I0320 13:40:07.053376 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" event={"ID":"92fadff7-ff1d-474b-8043-c0fb44d2e635","Type":"ContainerDied","Data":"359a73afedd22b757ad22052dc1a6c18c1ac850c87fa95ce8be863b47d039d4f"} Mar 20 13:40:07 crc kubenswrapper[4690]: I0320 13:40:07.053422 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566900-xlnh4" Mar 20 13:40:07 crc kubenswrapper[4690]: I0320 13:40:07.053431 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="359a73afedd22b757ad22052dc1a6c18c1ac850c87fa95ce8be863b47d039d4f" Mar 20 13:40:07 crc kubenswrapper[4690]: I0320 13:40:07.874755 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566894-zcjr2"] Mar 20 13:40:07 crc kubenswrapper[4690]: I0320 13:40:07.881650 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566894-zcjr2"] Mar 20 13:40:08 crc kubenswrapper[4690]: I0320 13:40:08.422477 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed808ce2-4c53-47b1-83c0-fb041145f034" path="/var/lib/kubelet/pods/ed808ce2-4c53-47b1-83c0-fb041145f034/volumes" Mar 20 13:40:08 crc kubenswrapper[4690]: I0320 13:40:08.431268 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:08 crc kubenswrapper[4690]: E0320 13:40:08.431468 4690 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:08 crc kubenswrapper[4690]: E0320 13:40:08.431538 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert podName:54581816-9413-47c6-889c-1ae815299b20 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:16.431519378 +0000 UTC m=+1062.721119371 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert") pod "infra-operator-controller-manager-669fff9c7c-qqp5b" (UID: "54581816-9413-47c6-889c-1ae815299b20") : secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:08 crc kubenswrapper[4690]: I0320 13:40:08.836084 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:08 crc kubenswrapper[4690]: E0320 13:40:08.836305 4690 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:08 crc kubenswrapper[4690]: E0320 13:40:08.836373 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert podName:f13f1ec1-31f1-4492-876d-42ad21a46373 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:16.836353381 +0000 UTC m=+1063.125953324 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert") pod "openstack-baremetal-operator-controller-manager-86657c54f5bfk29" (UID: "f13f1ec1-31f1-4492-876d-42ad21a46373") : secret "openstack-baremetal-operator-webhook-server-cert" not found Mar 20 13:40:09 crc kubenswrapper[4690]: I0320 13:40:09.140034 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:09 crc kubenswrapper[4690]: I0320 13:40:09.140135 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:09 crc kubenswrapper[4690]: E0320 13:40:09.140210 4690 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Mar 20 13:40:09 crc kubenswrapper[4690]: E0320 13:40:09.140280 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:17.140261303 +0000 UTC m=+1063.429861306 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "metrics-server-cert" not found Mar 20 13:40:09 crc kubenswrapper[4690]: E0320 13:40:09.140287 4690 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Mar 20 13:40:09 crc kubenswrapper[4690]: E0320 13:40:09.140339 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs podName:bcc248ae-99f3-4554-8cfb-9bf5f72385ce nodeName:}" failed. No retries permitted until 2026-03-20 13:40:17.140325104 +0000 UTC m=+1063.429925047 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs") pod "openstack-operator-controller-manager-85d5885774-zfkgt" (UID: "bcc248ae-99f3-4554-8cfb-9bf5f72385ce") : secret "webhook-server-cert" not found Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.101383 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" event={"ID":"11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6","Type":"ContainerStarted","Data":"31fec2e34ed07d80ff6def548aa0048645993d8a6932d4fa165c7c3cf3ddd789"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.101824 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.103878 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" event={"ID":"f4df792a-6016-407b-8ff0-338ab8db08f7","Type":"ContainerStarted","Data":"044be17f50a8fafb5c019bee6a5ad82374cb67de634b3e4ea97cd3240bae5e55"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.104250 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.105493 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" event={"ID":"870fb6a4-04ea-4c9a-ae30-4acb7e4a050c","Type":"ContainerStarted","Data":"f6d58f9dc3c85405b42739ae6a2d530bdf43a07eb56de7f32423042be58c2c04"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.105593 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.106575 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" event={"ID":"af375454-db79-4671-9be9-14e7b5927452","Type":"ContainerStarted","Data":"262d0bcfe5cb6e54ea72e0d78e4941b574cc95fd03566b368a78443fb1d77830"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.106889 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.107812 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" event={"ID":"a602a20e-98c2-4eef-8a20-a873a5f04b56","Type":"ContainerStarted","Data":"e1702b47a31b7acb39af0df1731893beaa35e9a79c2548f8428b279524cd1672"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.108133 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.109477 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" event={"ID":"e6edfe21-12c6-4ca3-9992-c47b65455a25","Type":"ContainerStarted","Data":"e7f7d0f31e494f45c81f210548764ec50e6b37b7784a3f2f27c90e57f1e03353"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.109778 4690 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.110752 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" event={"ID":"d30dfb8b-246a-461f-8230-4e12b67f8475","Type":"ContainerStarted","Data":"070d5a3324c0d33685bc8eaa11e7bb5698b780d6ed434eb7d20a46ee69b08a2b"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.111106 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.112087 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" event={"ID":"67bfb5a2-f27a-48d1-829d-67c998495611","Type":"ContainerStarted","Data":"438ac3d880dec3ed57815b68be00681e8c4c69444f21142cf96bc595699fad51"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.112392 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.113498 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" event={"ID":"aa8d9ef3-cf4c-4083-bb69-579193795ffb","Type":"ContainerStarted","Data":"c7093ef21973ab6e25a49a3c1520838cddb5a1daa9090b6abceb321578daa156"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.113802 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.114756 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" event={"ID":"509b5616-903e-4638-bcac-7db706a605fb","Type":"ContainerStarted","Data":"347e3c19d213436278166a740a9ecb8b7a1efda5e1ed26a38bfd6613c1873b20"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.115154 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.116246 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" event={"ID":"814ca78b-98a4-4e08-8c17-2ac1e45f3f70","Type":"ContainerStarted","Data":"c0ad1e8d5f9c7c35e0c657c0b9074d3e63108ab744e9ea098a61cc8dc30a198b"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.116417 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.117498 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" event={"ID":"be5cbf2c-ee29-4c1a-9eca-50c8069886fd","Type":"ContainerStarted","Data":"7fd3687588e73e68f9f15dd360189fe1239ea88df66402185017dca6bbc61979"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.117815 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.118769 4690 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" event={"ID":"0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5","Type":"ContainerStarted","Data":"8a1521fe819fe22c1acd1f866bf2c40ad5329c6aa76580ecaaa35c08cbb8b448"} Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.119086 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.132826 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" podStartSLOduration=2.986484703 podStartE2EDuration="14.13281364s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.102163845 +0000 UTC m=+1048.391763788" lastFinishedPulling="2026-03-20 13:40:13.248492782 +0000 UTC m=+1059.538092725" observedRunningTime="2026-03-20 13:40:14.13141023 +0000 UTC m=+1060.421010173" watchObservedRunningTime="2026-03-20 13:40:14.13281364 +0000 UTC m=+1060.422413583" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.167929 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" podStartSLOduration=3.347197962 podStartE2EDuration="14.167911768s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.433658923 +0000 UTC m=+1048.723258876" lastFinishedPulling="2026-03-20 13:40:13.254372739 +0000 UTC m=+1059.543972682" observedRunningTime="2026-03-20 13:40:14.164299415 +0000 UTC m=+1060.453899358" watchObservedRunningTime="2026-03-20 13:40:14.167911768 +0000 UTC m=+1060.457511711" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.244044 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" podStartSLOduration=3.129541023 podStartE2EDuration="14.244028313s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.132512959 +0000 UTC m=+1048.422112902" lastFinishedPulling="2026-03-20 13:40:13.247000259 +0000 UTC m=+1059.536600192" observedRunningTime="2026-03-20 13:40:14.239931546 +0000 UTC m=+1060.529531489" watchObservedRunningTime="2026-03-20 13:40:14.244028313 +0000 UTC m=+1060.533628256" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.244681 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" podStartSLOduration=3.371535224 podStartE2EDuration="14.244675941s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.457170131 +0000 UTC m=+1048.746770074" lastFinishedPulling="2026-03-20 13:40:13.330310828 +0000 UTC m=+1059.619910791" observedRunningTime="2026-03-20 13:40:14.214477962 +0000 UTC m=+1060.504077905" watchObservedRunningTime="2026-03-20 13:40:14.244675941 +0000 UTC m=+1060.534275884" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.267383 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" podStartSLOduration=3.080647551 podStartE2EDuration="14.267363856s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.060443109 +0000 UTC m=+1048.350043042" lastFinishedPulling="2026-03-20 
13:40:13.247159404 +0000 UTC m=+1059.536759347" observedRunningTime="2026-03-20 13:40:14.26432734 +0000 UTC m=+1060.553927283" watchObservedRunningTime="2026-03-20 13:40:14.267363856 +0000 UTC m=+1060.556963799" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.288723 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" podStartSLOduration=3.093143597 podStartE2EDuration="14.288707783s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.051456614 +0000 UTC m=+1048.341056557" lastFinishedPulling="2026-03-20 13:40:13.24702081 +0000 UTC m=+1059.536620743" observedRunningTime="2026-03-20 13:40:14.288086345 +0000 UTC m=+1060.577686288" watchObservedRunningTime="2026-03-20 13:40:14.288707783 +0000 UTC m=+1060.578307726" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.329155 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" podStartSLOduration=3.197782643 podStartE2EDuration="14.329137953s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.116338269 +0000 UTC m=+1048.405938212" lastFinishedPulling="2026-03-20 13:40:13.247693579 +0000 UTC m=+1059.537293522" observedRunningTime="2026-03-20 13:40:14.326156098 +0000 UTC m=+1060.615756041" watchObservedRunningTime="2026-03-20 13:40:14.329137953 +0000 UTC m=+1060.618737896" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.353384 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" podStartSLOduration=2.5677946179999998 podStartE2EDuration="14.353363042s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:01.461435585 +0000 UTC m=+1047.751035528" lastFinishedPulling="2026-03-20 13:40:13.247004019 +0000 UTC m=+1059.536603952" observedRunningTime="2026-03-20 13:40:14.348084072 +0000 UTC m=+1060.637684015" watchObservedRunningTime="2026-03-20 13:40:14.353363042 +0000 UTC m=+1060.642962985" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.402753 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" podStartSLOduration=3.139069433 podStartE2EDuration="14.402737136s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.051256238 +0000 UTC m=+1048.340856181" lastFinishedPulling="2026-03-20 13:40:13.314923941 +0000 UTC m=+1059.604523884" observedRunningTime="2026-03-20 13:40:14.367741691 +0000 UTC m=+1060.657341624" watchObservedRunningTime="2026-03-20 13:40:14.402737136 +0000 UTC m=+1060.692337079" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.403097 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" podStartSLOduration=3.24869224 podStartE2EDuration="14.403091356s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.092628574 +0000 UTC m=+1048.382228517" lastFinishedPulling="2026-03-20 13:40:13.24702769 +0000 UTC m=+1059.536627633" observedRunningTime="2026-03-20 13:40:14.397786365 +0000 UTC m=+1060.687386308" watchObservedRunningTime="2026-03-20 13:40:14.403091356 +0000 UTC m=+1060.692691299" Mar 20 13:40:14 crc 
kubenswrapper[4690]: I0320 13:40:14.423374 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" podStartSLOduration=3.29896765 podStartE2EDuration="14.423357522s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.122140564 +0000 UTC m=+1048.411740507" lastFinishedPulling="2026-03-20 13:40:13.246530436 +0000 UTC m=+1059.536130379" observedRunningTime="2026-03-20 13:40:14.420224533 +0000 UTC m=+1060.709824476" watchObservedRunningTime="2026-03-20 13:40:14.423357522 +0000 UTC m=+1060.712957465" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.449953 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" podStartSLOduration=3.3050821040000002 podStartE2EDuration="14.449938918s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.109536505 +0000 UTC m=+1048.399136448" lastFinishedPulling="2026-03-20 13:40:13.254393319 +0000 UTC m=+1059.543993262" observedRunningTime="2026-03-20 13:40:14.446339716 +0000 UTC m=+1060.735939659" watchObservedRunningTime="2026-03-20 13:40:14.449938918 +0000 UTC m=+1060.739538861" Mar 20 13:40:14 crc kubenswrapper[4690]: I0320 13:40:14.470689 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" podStartSLOduration=2.951252923 podStartE2EDuration="14.470671068s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:01.727258705 +0000 UTC m=+1048.016858648" lastFinishedPulling="2026-03-20 13:40:13.24667685 +0000 UTC m=+1059.536276793" observedRunningTime="2026-03-20 13:40:14.468924638 +0000 UTC m=+1060.758524581" watchObservedRunningTime="2026-03-20 13:40:14.470671068 +0000 UTC m=+1060.760271011" Mar 20 13:40:16 crc kubenswrapper[4690]: I0320 13:40:16.442545 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:16 crc kubenswrapper[4690]: E0320 13:40:16.442732 4690 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:16 crc kubenswrapper[4690]: E0320 13:40:16.443227 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert podName:54581816-9413-47c6-889c-1ae815299b20 nodeName:}" failed. No retries permitted until 2026-03-20 13:40:32.443206841 +0000 UTC m=+1078.732806784 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert") pod "infra-operator-controller-manager-669fff9c7c-qqp5b" (UID: "54581816-9413-47c6-889c-1ae815299b20") : secret "infra-operator-webhook-server-cert" not found Mar 20 13:40:16 crc kubenswrapper[4690]: I0320 13:40:16.852238 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:16 crc kubenswrapper[4690]: I0320 13:40:16.861125 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f13f1ec1-31f1-4492-876d-42ad21a46373-cert\") pod \"openstack-baremetal-operator-controller-manager-86657c54f5bfk29\" (UID: \"f13f1ec1-31f1-4492-876d-42ad21a46373\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:16 crc kubenswrapper[4690]: I0320 13:40:16.985794 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:17 crc kubenswrapper[4690]: I0320 13:40:17.156341 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:17 crc kubenswrapper[4690]: I0320 13:40:17.156413 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:17 crc kubenswrapper[4690]: I0320 13:40:17.160112 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-webhook-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:17 crc kubenswrapper[4690]: I0320 13:40:17.162636 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/bcc248ae-99f3-4554-8cfb-9bf5f72385ce-metrics-certs\") pod \"openstack-operator-controller-manager-85d5885774-zfkgt\" (UID: \"bcc248ae-99f3-4554-8cfb-9bf5f72385ce\") " pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:17 crc kubenswrapper[4690]: I0320 13:40:17.225128 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:18 crc kubenswrapper[4690]: I0320 13:40:18.511077 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29"] Mar 20 13:40:18 crc kubenswrapper[4690]: W0320 13:40:18.520357 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf13f1ec1_31f1_4492_876d_42ad21a46373.slice/crio-a3d376b07c03127100ddcd77099521b271b5e194f58c5a6d4e5640337b1abb81 WatchSource:0}: Error finding container a3d376b07c03127100ddcd77099521b271b5e194f58c5a6d4e5640337b1abb81: Status 404 returned error can't find the container with id a3d376b07c03127100ddcd77099521b271b5e194f58c5a6d4e5640337b1abb81 Mar 20 13:40:18 crc kubenswrapper[4690]: I0320 13:40:18.772542 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt"] Mar 20 13:40:18 crc kubenswrapper[4690]: W0320 13:40:18.789036 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbcc248ae_99f3_4554_8cfb_9bf5f72385ce.slice/crio-ea933887129d0f2f8c811be607dfba8d02c65ad76a598b896643e4583903800f WatchSource:0}: Error finding container ea933887129d0f2f8c811be607dfba8d02c65ad76a598b896643e4583903800f: Status 404 returned error can't find the container with id ea933887129d0f2f8c811be607dfba8d02c65ad76a598b896643e4583903800f Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.161835 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" event={"ID":"51eaf314-a93e-4736-b4ec-7de18291b971","Type":"ContainerStarted","Data":"2846f20c774bc50495b3e789c2c4c425eda351279ca4254fdbbf271b57737921"} Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.162610 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.165090 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" event={"ID":"bcc248ae-99f3-4554-8cfb-9bf5f72385ce","Type":"ContainerStarted","Data":"75d3042fa41b1cedcb0e87678ff20ed4dfee2cb6f8916e61b04ddf3da5c982e1"} Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.165139 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.165151 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" event={"ID":"bcc248ae-99f3-4554-8cfb-9bf5f72385ce","Type":"ContainerStarted","Data":"ea933887129d0f2f8c811be607dfba8d02c65ad76a598b896643e4583903800f"} Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.169507 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" event={"ID":"15a47805-6294-4f70-ba77-d22857c579b9","Type":"ContainerStarted","Data":"f0136ddb041ee88f649a66b49b368fe19e206c5613814e209bd3cda09252f941"} Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.169698 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.172367 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" event={"ID":"a0d899fb-25be-4485-9d4d-77ca047c1524","Type":"ContainerStarted","Data":"8dafa23b0cb7b5f1602448a05aa76da66f937940200ff67a657c0c5821576595"} Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.172529 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.175894 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" event={"ID":"f13f1ec1-31f1-4492-876d-42ad21a46373","Type":"ContainerStarted","Data":"a3d376b07c03127100ddcd77099521b271b5e194f58c5a6d4e5640337b1abb81"} Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.181863 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" podStartSLOduration=3.144177818 podStartE2EDuration="19.181836582s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.187490332 +0000 UTC m=+1048.477090275" lastFinishedPulling="2026-03-20 13:40:18.225149096 +0000 UTC m=+1064.514749039" observedRunningTime="2026-03-20 13:40:19.175709878 +0000 UTC m=+1065.465309831" watchObservedRunningTime="2026-03-20 13:40:19.181836582 +0000 UTC m=+1065.471436525" Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.196541 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" podStartSLOduration=3.445837787 podStartE2EDuration="19.19652593s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.473423623 +0000 UTC m=+1048.763023566" lastFinishedPulling="2026-03-20 13:40:18.224111766 +0000 UTC m=+1064.513711709" observedRunningTime="2026-03-20 13:40:19.189298884 +0000 UTC m=+1065.478898827" watchObservedRunningTime="2026-03-20 13:40:19.19652593 +0000 UTC m=+1065.486125873" Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.216061 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" podStartSLOduration=3.472441353 podStartE2EDuration="19.216040215s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.472467376 +0000 UTC m=+1048.762067319" lastFinishedPulling="2026-03-20 13:40:18.216066238 +0000 UTC m=+1064.505666181" observedRunningTime="2026-03-20 13:40:19.213443991 +0000 UTC m=+1065.503043964" watchObservedRunningTime="2026-03-20 13:40:19.216040215 +0000 UTC m=+1065.505640158" Mar 20 13:40:19 crc kubenswrapper[4690]: I0320 13:40:19.238614 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" podStartSLOduration=19.238596996 podStartE2EDuration="19.238596996s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:40:19.237780313 +0000 UTC m=+1065.527380256" watchObservedRunningTime="2026-03-20 
13:40:19.238596996 +0000 UTC m=+1065.528196939" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.668978 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-67dd5f86f5-88wmc" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.753500 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-8464cc45fb-sbjcq" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.786446 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-59bc569d95-6jwcd" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.804888 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-8d58dc466-kq824" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.853223 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-588d4d986b-dls6p" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.914212 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6f787dddc9-rxsjr" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.923180 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-768b96df4c-jfmgm" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.931819 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-79df6bcc97-dws7r" Mar 20 13:40:20 crc kubenswrapper[4690]: I0320 13:40:20.991575 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-55f864c847-f2cbx" Mar 20 13:40:21 crc kubenswrapper[4690]: I0320 13:40:21.030106 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-67ccfc9778-f5pph" Mar 20 13:40:21 crc kubenswrapper[4690]: I0320 13:40:21.061482 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-767865f676-pt8zd" Mar 20 13:40:21 crc kubenswrapper[4690]: I0320 13:40:21.424177 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-c674c5965-dktfp" Mar 20 13:40:21 crc kubenswrapper[4690]: I0320 13:40:21.448837 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5c5cb9c4d7-gvbnp" Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.202752 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" event={"ID":"775748cf-df5a-466b-9c3f-057ca3ed36ab","Type":"ContainerStarted","Data":"47b08e219799c80f58c6a8726b81582887bcd96f290751f00628e8576664fe78"} Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.203307 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.204259 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" event={"ID":"f13f1ec1-31f1-4492-876d-42ad21a46373","Type":"ContainerStarted","Data":"065e2d68027701b50e4aae8b401f8e822241b46dd8e1f45f28be2cdd69bec2ca"} Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.204408 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.205544 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" event={"ID":"ace63477-4c50-499f-958c-11135ab6a1a2","Type":"ContainerStarted","Data":"e00d6e87b6c4f4e33081918ee443eedf45c6d4782048a12fa9a7eb32cbaf2ae5"} Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.205682 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.207278 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" event={"ID":"49cc1968-cbc1-432a-a952-dec062db3bd5","Type":"ContainerStarted","Data":"71cabc6e2983fc11d28469c2a4562bf552ddbdad65d22fcc60f884f353a265c6"} Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.207432 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.219275 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" podStartSLOduration=3.231739169 podStartE2EDuration="23.219251677s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.442430222 +0000 UTC m=+1048.732030175" lastFinishedPulling="2026-03-20 13:40:22.42994273 +0000 UTC m=+1068.719542683" observedRunningTime="2026-03-20 13:40:23.215864691 +0000 UTC m=+1069.505464644" watchObservedRunningTime="2026-03-20 13:40:23.219251677 +0000 UTC m=+1069.508851620" Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.238740 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" podStartSLOduration=3.265121718 podStartE2EDuration="23.238722021s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.460045543 +0000 UTC m=+1048.749645496" lastFinishedPulling="2026-03-20 13:40:22.433645846 +0000 UTC m=+1068.723245799" observedRunningTime="2026-03-20 13:40:23.236182658 +0000 UTC m=+1069.525782611" watchObservedRunningTime="2026-03-20 13:40:23.238722021 +0000 UTC m=+1069.528321964" Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.256336 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" podStartSLOduration=3.288858303 podStartE2EDuration="23.256318381s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:02.459873938 +0000 UTC m=+1048.749473871" lastFinishedPulling="2026-03-20 13:40:22.427334016 +0000 UTC m=+1068.716933949" observedRunningTime="2026-03-20 13:40:23.254710265 +0000 UTC m=+1069.544310208" watchObservedRunningTime="2026-03-20 13:40:23.256318381 +0000 UTC m=+1069.545918324" 
Mar 20 13:40:23 crc kubenswrapper[4690]: I0320 13:40:23.283952 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" podStartSLOduration=19.359870555 podStartE2EDuration="23.283935826s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:18.522211424 +0000 UTC m=+1064.811811367" lastFinishedPulling="2026-03-20 13:40:22.446276695 +0000 UTC m=+1068.735876638" observedRunningTime="2026-03-20 13:40:23.279667845 +0000 UTC m=+1069.569267788" watchObservedRunningTime="2026-03-20 13:40:23.283935826 +0000 UTC m=+1069.573535779" Mar 20 13:40:27 crc kubenswrapper[4690]: I0320 13:40:27.237911 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-85d5885774-zfkgt" Mar 20 13:40:31 crc kubenswrapper[4690]: I0320 13:40:31.120585 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-5d488d59fb-54czp" Mar 20 13:40:31 crc kubenswrapper[4690]: I0320 13:40:31.316269 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-5b9f45d989-x5l86" Mar 20 13:40:31 crc kubenswrapper[4690]: I0320 13:40:31.354297 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-884679f54-5d2n4" Mar 20 13:40:31 crc kubenswrapper[4690]: I0320 13:40:31.382793 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-5784578c99-hhg2z" Mar 20 13:40:31 crc kubenswrapper[4690]: I0320 13:40:31.499432 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-d6b694c5-9bx66" Mar 20 13:40:31 crc kubenswrapper[4690]: I0320 13:40:31.534291 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-6c4d75f7f9-pfj42" Mar 20 13:40:32 crc kubenswrapper[4690]: I0320 13:40:32.498270 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:32 crc kubenswrapper[4690]: I0320 13:40:32.507566 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/54581816-9413-47c6-889c-1ae815299b20-cert\") pod \"infra-operator-controller-manager-669fff9c7c-qqp5b\" (UID: \"54581816-9413-47c6-889c-1ae815299b20\") " pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:32 crc kubenswrapper[4690]: I0320 13:40:32.623590 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:33 crc kubenswrapper[4690]: I0320 13:40:33.039796 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b"] Mar 20 13:40:33 crc kubenswrapper[4690]: W0320 13:40:33.047423 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod54581816_9413_47c6_889c_1ae815299b20.slice/crio-8602dc9063501448fa9478b505506b8850728315a089564fbc6cfabe433a4c8d WatchSource:0}: Error finding container 8602dc9063501448fa9478b505506b8850728315a089564fbc6cfabe433a4c8d: Status 404 returned error can't find the container with id 8602dc9063501448fa9478b505506b8850728315a089564fbc6cfabe433a4c8d Mar 20 13:40:33 crc kubenswrapper[4690]: I0320 13:40:33.295070 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" event={"ID":"54581816-9413-47c6-889c-1ae815299b20","Type":"ContainerStarted","Data":"8602dc9063501448fa9478b505506b8850728315a089564fbc6cfabe433a4c8d"} Mar 20 13:40:33 crc kubenswrapper[4690]: I0320 13:40:33.830398 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:40:33 crc kubenswrapper[4690]: I0320 13:40:33.830910 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:40:36 crc kubenswrapper[4690]: I0320 13:40:36.995363 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-86657c54f5bfk29" Mar 20 13:40:42 crc kubenswrapper[4690]: I0320 13:40:42.371433 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" event={"ID":"54581816-9413-47c6-889c-1ae815299b20","Type":"ContainerStarted","Data":"5a6a86fd171793bd8c4b477570844690836d7c3e0d9bb0f77789d18d57dbd217"} Mar 20 13:40:42 crc kubenswrapper[4690]: I0320 13:40:42.372105 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:40:42 crc kubenswrapper[4690]: I0320 13:40:42.405222 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" podStartSLOduration=34.183445152 podStartE2EDuration="42.40520111s" podCreationTimestamp="2026-03-20 13:40:00 +0000 UTC" firstStartedPulling="2026-03-20 13:40:33.049419454 +0000 UTC m=+1079.339019407" lastFinishedPulling="2026-03-20 13:40:41.271175422 +0000 UTC m=+1087.560775365" observedRunningTime="2026-03-20 13:40:42.39323223 +0000 UTC m=+1088.682832183" watchObservedRunningTime="2026-03-20 13:40:42.40520111 +0000 UTC m=+1088.694801063" Mar 20 13:40:43 crc kubenswrapper[4690]: I0320 13:40:43.062960 4690 scope.go:117] "RemoveContainer" containerID="4dece95a6e6f34d35d1da45af685042a97fc9d0db7c61e7bb42cd283eb45edee" Mar 
20 13:40:52 crc kubenswrapper[4690]: I0320 13:40:52.630542 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-669fff9c7c-qqp5b" Mar 20 13:41:03 crc kubenswrapper[4690]: I0320 13:41:03.839731 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:41:03 crc kubenswrapper[4690]: I0320 13:41:03.840289 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.571307 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s74gm"] Mar 20 13:41:10 crc kubenswrapper[4690]: E0320 13:41:10.572099 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92fadff7-ff1d-474b-8043-c0fb44d2e635" containerName="oc" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.572111 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="92fadff7-ff1d-474b-8043-c0fb44d2e635" containerName="oc" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.572242 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="92fadff7-ff1d-474b-8043-c0fb44d2e635" containerName="oc" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.573388 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.576072 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.579599 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.579738 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.579839 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-p49qw" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.596210 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s74gm"] Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.738868 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e746b93e-8dfe-42ad-a733-2807a0347306-config\") pod \"dnsmasq-dns-675f4bcbfc-s74gm\" (UID: \"e746b93e-8dfe-42ad-a733-2807a0347306\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.738949 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q4fk\" (UniqueName: \"kubernetes.io/projected/e746b93e-8dfe-42ad-a733-2807a0347306-kube-api-access-2q4fk\") pod \"dnsmasq-dns-675f4bcbfc-s74gm\" (UID: \"e746b93e-8dfe-42ad-a733-2807a0347306\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.750385 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dhrsg"] Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.751734 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.753515 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.760881 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dhrsg"] Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.841251 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.841319 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e746b93e-8dfe-42ad-a733-2807a0347306-config\") pod \"dnsmasq-dns-675f4bcbfc-s74gm\" (UID: \"e746b93e-8dfe-42ad-a733-2807a0347306\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.841338 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-config\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.841492 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psqfz\" (UniqueName: \"kubernetes.io/projected/4c086076-52e7-43e8-9110-df582b8773e4-kube-api-access-psqfz\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.841522 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q4fk\" (UniqueName: \"kubernetes.io/projected/e746b93e-8dfe-42ad-a733-2807a0347306-kube-api-access-2q4fk\") pod \"dnsmasq-dns-675f4bcbfc-s74gm\" (UID: \"e746b93e-8dfe-42ad-a733-2807a0347306\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.842192 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e746b93e-8dfe-42ad-a733-2807a0347306-config\") pod \"dnsmasq-dns-675f4bcbfc-s74gm\" (UID: \"e746b93e-8dfe-42ad-a733-2807a0347306\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.862750 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q4fk\" (UniqueName: \"kubernetes.io/projected/e746b93e-8dfe-42ad-a733-2807a0347306-kube-api-access-2q4fk\") pod \"dnsmasq-dns-675f4bcbfc-s74gm\" (UID: \"e746b93e-8dfe-42ad-a733-2807a0347306\") " pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.897083 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.942984 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-config\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.943322 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psqfz\" (UniqueName: \"kubernetes.io/projected/4c086076-52e7-43e8-9110-df582b8773e4-kube-api-access-psqfz\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.943636 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.944606 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-config\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.944827 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:10 crc kubenswrapper[4690]: I0320 13:41:10.968965 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psqfz\" (UniqueName: \"kubernetes.io/projected/4c086076-52e7-43e8-9110-df582b8773e4-kube-api-access-psqfz\") pod \"dnsmasq-dns-78dd6ddcc-dhrsg\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:11 crc kubenswrapper[4690]: I0320 13:41:11.065318 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:11 crc kubenswrapper[4690]: I0320 13:41:11.360621 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s74gm"] Mar 20 13:41:11 crc kubenswrapper[4690]: I0320 13:41:11.482809 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dhrsg"] Mar 20 13:41:11 crc kubenswrapper[4690]: I0320 13:41:11.599189 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" event={"ID":"4c086076-52e7-43e8-9110-df582b8773e4","Type":"ContainerStarted","Data":"8a3a2791a1e74e7ba86f19c2c2cf21061a0e4982be966ce4313746dfc2a79c8b"} Mar 20 13:41:11 crc kubenswrapper[4690]: I0320 13:41:11.601501 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" event={"ID":"e746b93e-8dfe-42ad-a733-2807a0347306","Type":"ContainerStarted","Data":"dcff189279fbbe1e2221af554947dc96a52e21906f9df0ddbcab6b1e814736e5"} Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.176962 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s74gm"] Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.201954 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-697np"] Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.203250 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.220968 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-697np"] Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.388103 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-config\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.388145 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-dns-svc\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.388180 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqm4f\" (UniqueName: \"kubernetes.io/projected/5b091e0c-de5b-425b-b9be-0a2def1592fc-kube-api-access-wqm4f\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.394040 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dhrsg"] Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.413423 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gp2rt"] Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.415073 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.427752 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gp2rt"] Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.489520 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-config\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.489565 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-dns-svc\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.489589 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqm4f\" (UniqueName: \"kubernetes.io/projected/5b091e0c-de5b-425b-b9be-0a2def1592fc-kube-api-access-wqm4f\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.491298 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-dns-svc\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.490486 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-config\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.511930 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqm4f\" (UniqueName: \"kubernetes.io/projected/5b091e0c-de5b-425b-b9be-0a2def1592fc-kube-api-access-wqm4f\") pod \"dnsmasq-dns-666b6646f7-697np\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.569120 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.591542 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-config\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.591581 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.591599 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qldwj\" (UniqueName: \"kubernetes.io/projected/c515e2c8-7d1c-4010-989a-1c61f02deea8-kube-api-access-qldwj\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.697824 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-config\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.698173 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.698195 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qldwj\" (UniqueName: \"kubernetes.io/projected/c515e2c8-7d1c-4010-989a-1c61f02deea8-kube-api-access-qldwj\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.699635 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-config\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.700152 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.715248 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qldwj\" (UniqueName: \"kubernetes.io/projected/c515e2c8-7d1c-4010-989a-1c61f02deea8-kube-api-access-qldwj\") pod \"dnsmasq-dns-57d769cc4f-gp2rt\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " 
pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:13 crc kubenswrapper[4690]: I0320 13:41:13.751661 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.017367 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-697np"] Mar 20 13:41:14 crc kubenswrapper[4690]: W0320 13:41:14.020268 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b091e0c_de5b_425b_b9be_0a2def1592fc.slice/crio-fe8a537c27090391210ea30cbfaebe035c1d6fd9459aa7ee243281020f1dcdf8 WatchSource:0}: Error finding container fe8a537c27090391210ea30cbfaebe035c1d6fd9459aa7ee243281020f1dcdf8: Status 404 returned error can't find the container with id fe8a537c27090391210ea30cbfaebe035c1d6fd9459aa7ee243281020f1dcdf8 Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.157576 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gp2rt"] Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.181245 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.182645 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.185533 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.188697 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.191779 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.192392 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.193160 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-njsl8" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.200881 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.209311 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.225920 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308104 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9508cc5-d6ca-435f-949a-790440ed5f11-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308162 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc 
kubenswrapper[4690]: I0320 13:41:14.308185 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308209 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308232 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308259 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308278 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9508cc5-d6ca-435f-949a-790440ed5f11-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308292 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308324 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308346 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-config-data\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.308361 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9x4g\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-kube-api-access-v9x4g\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.369908 4690 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/rabbitmq-cell1-server-0"] Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.371306 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.373392 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.429134 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.429349 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.429442 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.429501 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.429537 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.429542 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-qcb6d" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430230 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430276 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-config-data\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430301 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9x4g\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-kube-api-access-v9x4g\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430350 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9508cc5-d6ca-435f-949a-790440ed5f11-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430373 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430387 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-confd\") pod 
\"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430407 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430427 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430456 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430474 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9508cc5-d6ca-435f-949a-790440ed5f11-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.430492 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.431072 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.431423 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.431561 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.436132 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.436977 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/c9508cc5-d6ca-435f-949a-790440ed5f11-config-data\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.437144 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c9508cc5-d6ca-435f-949a-790440ed5f11-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.438446 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.444581 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.445447 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.446154 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c9508cc5-d6ca-435f-949a-790440ed5f11-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.450358 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.464070 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9x4g\" (UniqueName: \"kubernetes.io/projected/c9508cc5-d6ca-435f-949a-790440ed5f11-kube-api-access-v9x4g\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.491573 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"rabbitmq-server-0\" (UID: \"c9508cc5-d6ca-435f-949a-790440ed5f11\") " pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.508923 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.532792 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.532839 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b6c3ab56-9d3c-431c-a697-d6df19b67a21-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533035 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533087 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533146 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9w6z\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-kube-api-access-h9w6z\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533180 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533202 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533259 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533310 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-server-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533401 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.533460 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b6c3ab56-9d3c-431c-a697-d6df19b67a21-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.626721 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-697np" event={"ID":"5b091e0c-de5b-425b-b9be-0a2def1592fc","Type":"ContainerStarted","Data":"fe8a537c27090391210ea30cbfaebe035c1d6fd9459aa7ee243281020f1dcdf8"} Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634447 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634496 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b6c3ab56-9d3c-431c-a697-d6df19b67a21-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634556 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634579 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634611 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9w6z\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-kube-api-access-h9w6z\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634659 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 
13:41:14.634682 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634718 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634747 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634795 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.634820 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b6c3ab56-9d3c-431c-a697-d6df19b67a21-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.635030 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.635175 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.635301 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.636091 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.636184 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: 
\"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.636921 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b6c3ab56-9d3c-431c-a697-d6df19b67a21-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.637919 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b6c3ab56-9d3c-431c-a697-d6df19b67a21-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.640713 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b6c3ab56-9d3c-431c-a697-d6df19b67a21-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.641027 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.648154 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.651794 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9w6z\" (UniqueName: \"kubernetes.io/projected/b6c3ab56-9d3c-431c-a697-d6df19b67a21-kube-api-access-h9w6z\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.654355 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b6c3ab56-9d3c-431c-a697-d6df19b67a21\") " pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:14 crc kubenswrapper[4690]: I0320 13:41:14.754683 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:41:15 crc kubenswrapper[4690]: I0320 13:41:15.992705 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:15.996342 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:15.998756 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-k9bks" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:15.998940 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:15.999028 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:15.999874 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.001665 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.019112 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.079451 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a4acc48-2e3c-4b76-b55e-e9152c405f11-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.079505 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5a4acc48-2e3c-4b76-b55e-e9152c405f11-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.079524 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2hc67\" (UniqueName: \"kubernetes.io/projected/5a4acc48-2e3c-4b76-b55e-e9152c405f11-kube-api-access-2hc67\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.079716 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a4acc48-2e3c-4b76-b55e-e9152c405f11-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.079829 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-config-data-default\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.079921 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.079971 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-kolla-config\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.079990 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.180803 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-kolla-config\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.180942 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.181074 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a4acc48-2e3c-4b76-b55e-e9152c405f11-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.181138 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5a4acc48-2e3c-4b76-b55e-e9152c405f11-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.181194 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2hc67\" (UniqueName: \"kubernetes.io/projected/5a4acc48-2e3c-4b76-b55e-e9152c405f11-kube-api-access-2hc67\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.181332 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a4acc48-2e3c-4b76-b55e-e9152c405f11-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.181448 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-config-data-default\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.181572 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: 
\"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.181685 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/5a4acc48-2e3c-4b76-b55e-e9152c405f11-config-data-generated\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.181944 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.184112 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-config-data-default\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.184215 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-kolla-config\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.185016 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5a4acc48-2e3c-4b76-b55e-e9152c405f11-operator-scripts\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.195201 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/5a4acc48-2e3c-4b76-b55e-e9152c405f11-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.195620 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5a4acc48-2e3c-4b76-b55e-e9152c405f11-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.202249 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2hc67\" (UniqueName: \"kubernetes.io/projected/5a4acc48-2e3c-4b76-b55e-e9152c405f11-kube-api-access-2hc67\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.217059 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"openstack-galera-0\" (UID: \"5a4acc48-2e3c-4b76-b55e-e9152c405f11\") " pod="openstack/openstack-galera-0" Mar 20 13:41:16 crc kubenswrapper[4690]: I0320 13:41:16.328365 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: W0320 13:41:17.249088 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc515e2c8_7d1c_4010_989a_1c61f02deea8.slice/crio-cde3e5dbc338c6a04a079af56e3208622c8715ce5eced5b655ab1bcc8a1ec650 WatchSource:0}: Error finding container cde3e5dbc338c6a04a079af56e3208622c8715ce5eced5b655ab1bcc8a1ec650: Status 404 returned error can't find the container with id cde3e5dbc338c6a04a079af56e3208622c8715ce5eced5b655ab1bcc8a1ec650 Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.544779 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.545895 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.551705 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.551828 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-8pfj4" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.551936 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.555814 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.562397 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.604718 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.604782 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hhhp\" (UniqueName: \"kubernetes.io/projected/a81f6ca1-a67e-4cbc-99de-32701eccb13b-kube-api-access-7hhhp\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.604806 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a81f6ca1-a67e-4cbc-99de-32701eccb13b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.604858 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.604898 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.604917 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.604941 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a81f6ca1-a67e-4cbc-99de-32701eccb13b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.604957 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a81f6ca1-a67e-4cbc-99de-32701eccb13b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.649248 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" event={"ID":"c515e2c8-7d1c-4010-989a-1c61f02deea8","Type":"ContainerStarted","Data":"cde3e5dbc338c6a04a079af56e3208622c8715ce5eced5b655ab1bcc8a1ec650"} Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706052 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706113 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hhhp\" (UniqueName: \"kubernetes.io/projected/a81f6ca1-a67e-4cbc-99de-32701eccb13b-kube-api-access-7hhhp\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706139 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a81f6ca1-a67e-4cbc-99de-32701eccb13b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706182 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706200 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod 
\"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706229 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706624 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706654 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a81f6ca1-a67e-4cbc-99de-32701eccb13b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.706674 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a81f6ca1-a67e-4cbc-99de-32701eccb13b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.708585 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.708863 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a81f6ca1-a67e-4cbc-99de-32701eccb13b-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.709086 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.710345 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a81f6ca1-a67e-4cbc-99de-32701eccb13b-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.712542 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a81f6ca1-a67e-4cbc-99de-32701eccb13b-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " 
pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.712600 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/a81f6ca1-a67e-4cbc-99de-32701eccb13b-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.723326 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hhhp\" (UniqueName: \"kubernetes.io/projected/a81f6ca1-a67e-4cbc-99de-32701eccb13b-kube-api-access-7hhhp\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.748791 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"openstack-cell1-galera-0\" (UID: \"a81f6ca1-a67e-4cbc-99de-32701eccb13b\") " pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.841031 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.841930 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.844049 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.876019 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.877397 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.877776 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-czstk" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.882868 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.909122 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-config-data\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.909184 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-82455\" (UniqueName: \"kubernetes.io/projected/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-kube-api-access-82455\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.909265 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.909295 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:17 crc kubenswrapper[4690]: I0320 13:41:17.909325 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-kolla-config\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.010680 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-config-data\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.010727 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-82455\" (UniqueName: \"kubernetes.io/projected/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-kube-api-access-82455\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.010767 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.010792 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.010812 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-kolla-config\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.011497 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-config-data\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.011530 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-kolla-config\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.032619 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-memcached-tls-certs\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.033148 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-82455\" (UniqueName: \"kubernetes.io/projected/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-kube-api-access-82455\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.047564 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce9abe13-14e4-4ce8-ae28-b52022d16a0e-combined-ca-bundle\") pod \"memcached-0\" (UID: \"ce9abe13-14e4-4ce8-ae28-b52022d16a0e\") " pod="openstack/memcached-0" Mar 20 13:41:18 crc kubenswrapper[4690]: I0320 13:41:18.196156 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Mar 20 13:41:20 crc kubenswrapper[4690]: I0320 13:41:20.220774 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:41:20 crc kubenswrapper[4690]: I0320 13:41:20.222159 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Mar 20 13:41:20 crc kubenswrapper[4690]: I0320 13:41:20.224472 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-c2q9t" Mar 20 13:41:20 crc kubenswrapper[4690]: I0320 13:41:20.227191 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:41:20 crc kubenswrapper[4690]: I0320 13:41:20.347421 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnn4t\" (UniqueName: \"kubernetes.io/projected/d3e883d8-973f-4e69-a13f-175f1904a203-kube-api-access-nnn4t\") pod \"kube-state-metrics-0\" (UID: \"d3e883d8-973f-4e69-a13f-175f1904a203\") " pod="openstack/kube-state-metrics-0" Mar 20 13:41:20 crc kubenswrapper[4690]: I0320 13:41:20.448951 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnn4t\" (UniqueName: \"kubernetes.io/projected/d3e883d8-973f-4e69-a13f-175f1904a203-kube-api-access-nnn4t\") pod \"kube-state-metrics-0\" (UID: \"d3e883d8-973f-4e69-a13f-175f1904a203\") " pod="openstack/kube-state-metrics-0" Mar 20 13:41:20 crc kubenswrapper[4690]: I0320 13:41:20.468156 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnn4t\" (UniqueName: \"kubernetes.io/projected/d3e883d8-973f-4e69-a13f-175f1904a203-kube-api-access-nnn4t\") pod \"kube-state-metrics-0\" (UID: \"d3e883d8-973f-4e69-a13f-175f1904a203\") " pod="openstack/kube-state-metrics-0" Mar 20 13:41:20 crc kubenswrapper[4690]: I0320 13:41:20.550065 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.322218 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-mxmrl"] Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.323439 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.329966 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-kwrpg" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.331095 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.332432 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.363582 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-s6fhs"] Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.366896 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mxmrl"] Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.367004 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.392391 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-s6fhs"] Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.499578 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-run\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.500067 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-log\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.500222 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5frj4\" (UniqueName: \"kubernetes.io/projected/ba9868f8-3baf-4ecd-896c-1497873f32d7-kube-api-access-5frj4\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.500372 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-log-ovn\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.500535 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-run\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.500677 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-ovn-controller-tls-certs\") pod \"ovn-controller-mxmrl\" (UID: 
\"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.500791 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-lib\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.500952 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-etc-ovs\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.501065 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-run-ovn\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.501167 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-scripts\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.501254 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x6xmb\" (UniqueName: \"kubernetes.io/projected/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-kube-api-access-x6xmb\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.501360 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba9868f8-3baf-4ecd-896c-1497873f32d7-scripts\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.501496 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-combined-ca-bundle\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.602862 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-ovn-controller-tls-certs\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.603182 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-lib\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " 
pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.603318 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-etc-ovs\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.603424 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-run-ovn\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.603522 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-scripts\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.603608 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x6xmb\" (UniqueName: \"kubernetes.io/projected/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-kube-api-access-x6xmb\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.603713 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba9868f8-3baf-4ecd-896c-1497873f32d7-scripts\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.604515 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-combined-ca-bundle\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.605233 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-run\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.605360 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-log\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.605508 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5frj4\" (UniqueName: \"kubernetes.io/projected/ba9868f8-3baf-4ecd-896c-1497873f32d7-kube-api-access-5frj4\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.605638 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" 
(UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-log-ovn\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.605762 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-run\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.606038 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-run\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.606170 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-log-ovn\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.604766 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-lib\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.606046 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-run\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.604796 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-etc-ovs\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.604619 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-var-run-ovn\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.606047 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ba9868f8-3baf-4ecd-896c-1497873f32d7-var-log\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.607222 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ba9868f8-3baf-4ecd-896c-1497873f32d7-scripts\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.607991 4690 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-scripts\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.618592 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-combined-ca-bundle\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.622896 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5frj4\" (UniqueName: \"kubernetes.io/projected/ba9868f8-3baf-4ecd-896c-1497873f32d7-kube-api-access-5frj4\") pod \"ovn-controller-ovs-s6fhs\" (UID: \"ba9868f8-3baf-4ecd-896c-1497873f32d7\") " pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.625444 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-ovn-controller-tls-certs\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.626671 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x6xmb\" (UniqueName: \"kubernetes.io/projected/9e0061bd-d72c-4aeb-86f0-154e0cccfe15-kube-api-access-x6xmb\") pod \"ovn-controller-mxmrl\" (UID: \"9e0061bd-d72c-4aeb-86f0-154e0cccfe15\") " pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.698209 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:23 crc kubenswrapper[4690]: I0320 13:41:23.701507 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.607802 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.608992 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.611667 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.611918 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-v4d4q" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.616136 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.616178 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.616517 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.632826 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.724395 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.724473 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.724512 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.724554 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.724572 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.724603 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.724622 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-d9l2k\" (UniqueName: \"kubernetes.io/projected/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-kube-api-access-d9l2k\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.724650 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-config\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.825903 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.825947 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.825980 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.826001 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9l2k\" (UniqueName: \"kubernetes.io/projected/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-kube-api-access-d9l2k\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.826039 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-config\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.826115 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.826163 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.826207 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc 
kubenswrapper[4690]: I0320 13:41:24.826619 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.826641 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.827571 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.829518 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-config\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.839825 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.839880 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.839920 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.844361 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9l2k\" (UniqueName: \"kubernetes.io/projected/c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8-kube-api-access-d9l2k\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.868125 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"ovsdbserver-sb-0\" (UID: \"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8\") " pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:24 crc kubenswrapper[4690]: I0320 13:41:24.927116 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Mar 20 13:41:26 crc kubenswrapper[4690]: I0320 13:41:26.473212 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Mar 20 13:41:26 crc kubenswrapper[4690]: W0320 13:41:26.848741 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc9508cc5_d6ca_435f_949a_790440ed5f11.slice/crio-98e955f103db7951f4873882572473e1880595510048a2fd4ec59b64430f17a7 WatchSource:0}: Error finding container 98e955f103db7951f4873882572473e1880595510048a2fd4ec59b64430f17a7: Status 404 returned error can't find the container with id 98e955f103db7951f4873882572473e1880595510048a2fd4ec59b64430f17a7 Mar 20 13:41:26 crc kubenswrapper[4690]: E0320 13:41:26.852681 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Mar 20 13:41:26 crc kubenswrapper[4690]: E0320 13:41:26.852925 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-psqfz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-dhrsg_openstack(4c086076-52e7-43e8-9110-df582b8773e4): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 13:41:26 crc kubenswrapper[4690]: E0320 13:41:26.855715 4690 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" podUID="4c086076-52e7-43e8-9110-df582b8773e4" Mar 20 13:41:26 crc kubenswrapper[4690]: E0320 13:41:26.872559 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Mar 20 13:41:26 crc kubenswrapper[4690]: E0320 13:41:26.872752 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2q4fk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-s74gm_openstack(e746b93e-8dfe-42ad-a733-2807a0347306): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 13:41:26 crc kubenswrapper[4690]: E0320 13:41:26.873938 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" podUID="e746b93e-8dfe-42ad-a733-2807a0347306" Mar 20 13:41:26 crc kubenswrapper[4690]: I0320 13:41:26.983951 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Mar 20 13:41:26 crc kubenswrapper[4690]: I0320 13:41:26.986867 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:26 crc kubenswrapper[4690]: I0320 13:41:26.989121 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Mar 20 13:41:26 crc kubenswrapper[4690]: I0320 13:41:26.992046 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Mar 20 13:41:26 crc kubenswrapper[4690]: I0320 13:41:26.992250 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Mar 20 13:41:26 crc kubenswrapper[4690]: I0320 13:41:26.992693 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-mjmdt" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.027980 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.165145 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26e022b9-e7f5-4787-abe8-9967d8f4d11e-config\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.165479 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26e022b9-e7f5-4787-abe8-9967d8f4d11e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.165500 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.165529 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.165557 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kwcnz\" (UniqueName: \"kubernetes.io/projected/26e022b9-e7f5-4787-abe8-9967d8f4d11e-kube-api-access-kwcnz\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.165585 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/26e022b9-e7f5-4787-abe8-9967d8f4d11e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.165622 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " 
pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.165639 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.266988 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26e022b9-e7f5-4787-abe8-9967d8f4d11e-config\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.267058 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26e022b9-e7f5-4787-abe8-9967d8f4d11e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.267078 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.267125 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.267152 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kwcnz\" (UniqueName: \"kubernetes.io/projected/26e022b9-e7f5-4787-abe8-9967d8f4d11e-kube-api-access-kwcnz\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.267222 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/26e022b9-e7f5-4787-abe8-9967d8f4d11e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.267283 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.267302 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.268345 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.268498 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26e022b9-e7f5-4787-abe8-9967d8f4d11e-config\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.268534 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/26e022b9-e7f5-4787-abe8-9967d8f4d11e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.269122 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26e022b9-e7f5-4787-abe8-9967d8f4d11e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.273130 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.274188 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.280869 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/26e022b9-e7f5-4787-abe8-9967d8f4d11e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.283808 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kwcnz\" (UniqueName: \"kubernetes.io/projected/26e022b9-e7f5-4787-abe8-9967d8f4d11e-kube-api-access-kwcnz\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.287540 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"26e022b9-e7f5-4787-abe8-9967d8f4d11e\") " pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.312919 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Mar 20 13:41:27 crc kubenswrapper[4690]: W0320 13:41:27.321391 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5a4acc48_2e3c_4b76_b55e_e9152c405f11.slice/crio-cf9f5ac184c98a5660bb9fc791ecd3fff930fa9795cefd9710b47006052a4a67 WatchSource:0}: Error finding container 
cf9f5ac184c98a5660bb9fc791ecd3fff930fa9795cefd9710b47006052a4a67: Status 404 returned error can't find the container with id cf9f5ac184c98a5660bb9fc791ecd3fff930fa9795cefd9710b47006052a4a67 Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.436985 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.497568 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Mar 20 13:41:27 crc kubenswrapper[4690]: W0320 13:41:27.506747 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda81f6ca1_a67e_4cbc_99de_32701eccb13b.slice/crio-76a8218aa419e8d153384b9ac68f96e9daa11f60d0fd31c0b1ba0adb8b0dc39b WatchSource:0}: Error finding container 76a8218aa419e8d153384b9ac68f96e9daa11f60d0fd31c0b1ba0adb8b0dc39b: Status 404 returned error can't find the container with id 76a8218aa419e8d153384b9ac68f96e9daa11f60d0fd31c0b1ba0adb8b0dc39b Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.510842 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Mar 20 13:41:27 crc kubenswrapper[4690]: W0320 13:41:27.537615 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb6c3ab56_9d3c_431c_a697_d6df19b67a21.slice/crio-c833bd014fbc52ff589f31598264825e6b109dbe3ad7c9275aaa33282274cc66 WatchSource:0}: Error finding container c833bd014fbc52ff589f31598264825e6b109dbe3ad7c9275aaa33282274cc66: Status 404 returned error can't find the container with id c833bd014fbc52ff589f31598264825e6b109dbe3ad7c9275aaa33282274cc66 Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.619762 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.627998 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Mar 20 13:41:27 crc kubenswrapper[4690]: W0320 13:41:27.629442 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce9abe13_14e4_4ce8_ae28_b52022d16a0e.slice/crio-bf6ea97b1d6891c9cd0067df2f079e56ac9062663832fc91330cba6c12f9e4c7 WatchSource:0}: Error finding container bf6ea97b1d6891c9cd0067df2f079e56ac9062663832fc91330cba6c12f9e4c7: Status 404 returned error can't find the container with id bf6ea97b1d6891c9cd0067df2f079e56ac9062663832fc91330cba6c12f9e4c7 Mar 20 13:41:27 crc kubenswrapper[4690]: W0320 13:41:27.652316 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc0b5e5a2_350a_4cd6_92aa_2039e7f48cc8.slice/crio-1bab18bfb71fae207c438ff96a541fb05b1e10eb0581527e4f2c5bdb4b2c2fe4 WatchSource:0}: Error finding container 1bab18bfb71fae207c438ff96a541fb05b1e10eb0581527e4f2c5bdb4b2c2fe4: Status 404 returned error can't find the container with id 1bab18bfb71fae207c438ff96a541fb05b1e10eb0581527e4f2c5bdb4b2c2fe4 Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.654588 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.664373 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mxmrl"] Mar 20 13:41:27 crc kubenswrapper[4690]: W0320 13:41:27.664460 4690 manager.go:1169] Failed to 
process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3e883d8_973f_4e69_a13f_175f1904a203.slice/crio-1040739cf1913d44c6e570cdcf88c4e5e759975150e4576d1c8fa8883b7748fe WatchSource:0}: Error finding container 1040739cf1913d44c6e570cdcf88c4e5e759975150e4576d1c8fa8883b7748fe: Status 404 returned error can't find the container with id 1040739cf1913d44c6e570cdcf88c4e5e759975150e4576d1c8fa8883b7748fe Mar 20 13:41:27 crc kubenswrapper[4690]: W0320 13:41:27.672307 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9e0061bd_d72c_4aeb_86f0_154e0cccfe15.slice/crio-6e6d414f5a61ad2ff4046d026a5e98f3f904575c9308d59a5992c437c0cf749d WatchSource:0}: Error finding container 6e6d414f5a61ad2ff4046d026a5e98f3f904575c9308d59a5992c437c0cf749d: Status 404 returned error can't find the container with id 6e6d414f5a61ad2ff4046d026a5e98f3f904575c9308d59a5992c437c0cf749d Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.735888 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5a4acc48-2e3c-4b76-b55e-e9152c405f11","Type":"ContainerStarted","Data":"cf9f5ac184c98a5660bb9fc791ecd3fff930fa9795cefd9710b47006052a4a67"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.740130 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-697np" event={"ID":"5b091e0c-de5b-425b-b9be-0a2def1592fc","Type":"ContainerDied","Data":"870c0e87e1c7ad3294d8f10982324c05ca3b6e4002fc0efe822cbc6d1f8ea3c7"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.740440 4690 generic.go:334] "Generic (PLEG): container finished" podID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerID="870c0e87e1c7ad3294d8f10982324c05ca3b6e4002fc0efe822cbc6d1f8ea3c7" exitCode=0 Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.742092 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a81f6ca1-a67e-4cbc-99de-32701eccb13b","Type":"ContainerStarted","Data":"76a8218aa419e8d153384b9ac68f96e9daa11f60d0fd31c0b1ba0adb8b0dc39b"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.744574 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ce9abe13-14e4-4ce8-ae28-b52022d16a0e","Type":"ContainerStarted","Data":"bf6ea97b1d6891c9cd0067df2f079e56ac9062663832fc91330cba6c12f9e4c7"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.746056 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mxmrl" event={"ID":"9e0061bd-d72c-4aeb-86f0-154e0cccfe15","Type":"ContainerStarted","Data":"6e6d414f5a61ad2ff4046d026a5e98f3f904575c9308d59a5992c437c0cf749d"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.746574 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-s6fhs"] Mar 20 13:41:27 crc kubenswrapper[4690]: W0320 13:41:27.750549 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podba9868f8_3baf_4ecd_896c_1497873f32d7.slice/crio-1baa376cfb43ab779c8c2aa4e52aa2635926067236d294311e20441740c0c84b WatchSource:0}: Error finding container 1baa376cfb43ab779c8c2aa4e52aa2635926067236d294311e20441740c0c84b: Status 404 returned error can't find the container with id 1baa376cfb43ab779c8c2aa4e52aa2635926067236d294311e20441740c0c84b Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.751993 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b6c3ab56-9d3c-431c-a697-d6df19b67a21","Type":"ContainerStarted","Data":"c833bd014fbc52ff589f31598264825e6b109dbe3ad7c9275aaa33282274cc66"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.753608 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d3e883d8-973f-4e69-a13f-175f1904a203","Type":"ContainerStarted","Data":"1040739cf1913d44c6e570cdcf88c4e5e759975150e4576d1c8fa8883b7748fe"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.766086 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8","Type":"ContainerStarted","Data":"1bab18bfb71fae207c438ff96a541fb05b1e10eb0581527e4f2c5bdb4b2c2fe4"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.768269 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c9508cc5-d6ca-435f-949a-790440ed5f11","Type":"ContainerStarted","Data":"98e955f103db7951f4873882572473e1880595510048a2fd4ec59b64430f17a7"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.770328 4690 generic.go:334] "Generic (PLEG): container finished" podID="c515e2c8-7d1c-4010-989a-1c61f02deea8" containerID="9beb1a329e642e500dfdcc999ed1e1af26106709e23cfee04faa04630361170d" exitCode=0 Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.770399 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" event={"ID":"c515e2c8-7d1c-4010-989a-1c61f02deea8","Type":"ContainerDied","Data":"9beb1a329e642e500dfdcc999ed1e1af26106709e23cfee04faa04630361170d"} Mar 20 13:41:27 crc kubenswrapper[4690]: I0320 13:41:27.997948 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.154002 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.255004 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.294076 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-dns-svc\") pod \"4c086076-52e7-43e8-9110-df582b8773e4\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.294139 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-config\") pod \"4c086076-52e7-43e8-9110-df582b8773e4\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.294202 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psqfz\" (UniqueName: \"kubernetes.io/projected/4c086076-52e7-43e8-9110-df582b8773e4-kube-api-access-psqfz\") pod \"4c086076-52e7-43e8-9110-df582b8773e4\" (UID: \"4c086076-52e7-43e8-9110-df582b8773e4\") " Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.294919 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4c086076-52e7-43e8-9110-df582b8773e4" (UID: "4c086076-52e7-43e8-9110-df582b8773e4"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.295870 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-config" (OuterVolumeSpecName: "config") pod "4c086076-52e7-43e8-9110-df582b8773e4" (UID: "4c086076-52e7-43e8-9110-df582b8773e4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.301887 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c086076-52e7-43e8-9110-df582b8773e4-kube-api-access-psqfz" (OuterVolumeSpecName: "kube-api-access-psqfz") pod "4c086076-52e7-43e8-9110-df582b8773e4" (UID: "4c086076-52e7-43e8-9110-df582b8773e4"). InnerVolumeSpecName "kube-api-access-psqfz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.395478 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2q4fk\" (UniqueName: \"kubernetes.io/projected/e746b93e-8dfe-42ad-a733-2807a0347306-kube-api-access-2q4fk\") pod \"e746b93e-8dfe-42ad-a733-2807a0347306\" (UID: \"e746b93e-8dfe-42ad-a733-2807a0347306\") " Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.395544 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e746b93e-8dfe-42ad-a733-2807a0347306-config\") pod \"e746b93e-8dfe-42ad-a733-2807a0347306\" (UID: \"e746b93e-8dfe-42ad-a733-2807a0347306\") " Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.396162 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.396189 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psqfz\" (UniqueName: \"kubernetes.io/projected/4c086076-52e7-43e8-9110-df582b8773e4-kube-api-access-psqfz\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.396207 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4c086076-52e7-43e8-9110-df582b8773e4-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.396267 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e746b93e-8dfe-42ad-a733-2807a0347306-config" (OuterVolumeSpecName: "config") pod "e746b93e-8dfe-42ad-a733-2807a0347306" (UID: "e746b93e-8dfe-42ad-a733-2807a0347306"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.399378 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e746b93e-8dfe-42ad-a733-2807a0347306-kube-api-access-2q4fk" (OuterVolumeSpecName: "kube-api-access-2q4fk") pod "e746b93e-8dfe-42ad-a733-2807a0347306" (UID: "e746b93e-8dfe-42ad-a733-2807a0347306"). InnerVolumeSpecName "kube-api-access-2q4fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.497369 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2q4fk\" (UniqueName: \"kubernetes.io/projected/e746b93e-8dfe-42ad-a733-2807a0347306-kube-api-access-2q4fk\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.497407 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e746b93e-8dfe-42ad-a733-2807a0347306-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.780786 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.780789 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-s74gm" event={"ID":"e746b93e-8dfe-42ad-a733-2807a0347306","Type":"ContainerDied","Data":"dcff189279fbbe1e2221af554947dc96a52e21906f9df0ddbcab6b1e814736e5"} Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.783383 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"26e022b9-e7f5-4787-abe8-9967d8f4d11e","Type":"ContainerStarted","Data":"0f634632f8d184b81cec8831e77f2d123b2ece52b37723fd85335c8ed2f00f33"} Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.787234 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-697np" event={"ID":"5b091e0c-de5b-425b-b9be-0a2def1592fc","Type":"ContainerStarted","Data":"85480b7aa2c368fa1bafb2c91d9856f4bb254d8b559de7c14055da4b0bb3d21a"} Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.787405 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.789840 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" event={"ID":"c515e2c8-7d1c-4010-989a-1c61f02deea8","Type":"ContainerStarted","Data":"65f76795183883680dc1dd91f84a05caf0196b25313444a056aaaf595ce9535b"} Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.790490 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.797954 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" event={"ID":"4c086076-52e7-43e8-9110-df582b8773e4","Type":"ContainerDied","Data":"8a3a2791a1e74e7ba86f19c2c2cf21061a0e4982be966ce4313746dfc2a79c8b"} Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.798032 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-dhrsg" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.800207 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-s6fhs" event={"ID":"ba9868f8-3baf-4ecd-896c-1497873f32d7","Type":"ContainerStarted","Data":"1baa376cfb43ab779c8c2aa4e52aa2635926067236d294311e20441740c0c84b"} Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.875983 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s74gm"] Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.898185 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-s74gm"] Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.909963 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dhrsg"] Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.913375 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-dhrsg"] Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.915451 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" podStartSLOduration=6.228749735 podStartE2EDuration="15.915434254s" podCreationTimestamp="2026-03-20 13:41:13 +0000 UTC" firstStartedPulling="2026-03-20 13:41:17.251643372 +0000 UTC m=+1123.541243315" lastFinishedPulling="2026-03-20 13:41:26.938327891 +0000 UTC m=+1133.227927834" observedRunningTime="2026-03-20 13:41:28.851925812 +0000 UTC m=+1135.141525765" watchObservedRunningTime="2026-03-20 13:41:28.915434254 +0000 UTC m=+1135.205034197" Mar 20 13:41:28 crc kubenswrapper[4690]: I0320 13:41:28.920242 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-697np" podStartSLOduration=3.013684016 podStartE2EDuration="15.920234932s" podCreationTimestamp="2026-03-20 13:41:13 +0000 UTC" firstStartedPulling="2026-03-20 13:41:14.022478268 +0000 UTC m=+1120.312078211" lastFinishedPulling="2026-03-20 13:41:26.929029184 +0000 UTC m=+1133.218629127" observedRunningTime="2026-03-20 13:41:28.875944571 +0000 UTC m=+1135.165544514" watchObservedRunningTime="2026-03-20 13:41:28.920234932 +0000 UTC m=+1135.209834875" Mar 20 13:41:30 crc kubenswrapper[4690]: I0320 13:41:30.427760 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c086076-52e7-43e8-9110-df582b8773e4" path="/var/lib/kubelet/pods/4c086076-52e7-43e8-9110-df582b8773e4/volumes" Mar 20 13:41:30 crc kubenswrapper[4690]: I0320 13:41:30.428270 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e746b93e-8dfe-42ad-a733-2807a0347306" path="/var/lib/kubelet/pods/e746b93e-8dfe-42ad-a733-2807a0347306/volumes" Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.571069 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.753001 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.827080 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-697np"] Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.829513 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.829561 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.829598 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.830295 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1cbfcd3ff515926ff968741597036e24520ad18c5c3213b253b31335bc1c23e8"} pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.830552 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" containerID="cri-o://1cbfcd3ff515926ff968741597036e24520ad18c5c3213b253b31335bc1c23e8" gracePeriod=600 Mar 20 13:41:33 crc kubenswrapper[4690]: I0320 13:41:33.859233 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-697np" podUID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerName="dnsmasq-dns" containerID="cri-o://85480b7aa2c368fa1bafb2c91d9856f4bb254d8b559de7c14055da4b0bb3d21a" gracePeriod=10 Mar 20 13:41:34 crc kubenswrapper[4690]: I0320 13:41:34.874295 4690 generic.go:334] "Generic (PLEG): container finished" podID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerID="85480b7aa2c368fa1bafb2c91d9856f4bb254d8b559de7c14055da4b0bb3d21a" exitCode=0 Mar 20 13:41:34 crc kubenswrapper[4690]: I0320 13:41:34.874338 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-697np" event={"ID":"5b091e0c-de5b-425b-b9be-0a2def1592fc","Type":"ContainerDied","Data":"85480b7aa2c368fa1bafb2c91d9856f4bb254d8b559de7c14055da4b0bb3d21a"} Mar 20 13:41:34 crc kubenswrapper[4690]: I0320 13:41:34.877587 4690 generic.go:334] "Generic (PLEG): container finished" podID="60ded650-b298-4115-8286-8969b94d4062" containerID="1cbfcd3ff515926ff968741597036e24520ad18c5c3213b253b31335bc1c23e8" exitCode=0 Mar 20 13:41:34 crc kubenswrapper[4690]: I0320 13:41:34.877619 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerDied","Data":"1cbfcd3ff515926ff968741597036e24520ad18c5c3213b253b31335bc1c23e8"} Mar 20 13:41:34 crc kubenswrapper[4690]: I0320 13:41:34.877675 4690 scope.go:117] "RemoveContainer" containerID="a92e622601d5d0cbc8d5ec8266b1e5ffd0ed3023dc04d14e7b0e5bdc6a68783b" Mar 20 13:41:38 crc kubenswrapper[4690]: I0320 13:41:38.570616 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-666b6646f7-697np" podUID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.97:5353: connect: connection 
refused" Mar 20 13:41:39 crc kubenswrapper[4690]: E0320 13:41:39.645184 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified" Mar 20 13:41:39 crc kubenswrapper[4690]: E0320 13:41:39.645616 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ovsdbserver-sb,Image:quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified,Command:[/usr/bin/dumb-init],Args:[/usr/local/bin/container-scripts/setup.sh],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n656h5dbh5c8h68fh57bh5f5h5cdh9ch68fh5dfhb8hdbh8bh5c7h674h555hfbh84h8ch58fh596h6dhfbh5b7hb6h668h699h64ch5fdh675h575h57cq,ValueFrom:nil,},EnvVar{Name:OVN_LOGDIR,Value:/tmp,ValueFrom:nil,},EnvVar{Name:OVN_RUNDIR,Value:/tmp,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovndbcluster-sb-etc-ovn,ReadOnly:false,MountPath:/etc/ovn,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdb-rundir,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndb.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/private/ovndb.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb-tls-certs,ReadOnly:true,MountPath:/etc/pki/tls/certs/ovndbca.crt,SubPath:ca.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d9l2k,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof 
ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:&Lifecycle{PostStart:nil,PreStop:&LifecycleHandler{Exec:&ExecAction{Command:[/usr/local/bin/container-scripts/cleanup.sh],},HTTPGet:nil,TCPSocket:nil,Sleep:nil,},},TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/pidof ovsdb-server],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:20,TerminationGracePeriodSeconds:nil,},ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovsdbserver-sb-0_openstack(c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.007325 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.108154 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-dns-svc\") pod \"5b091e0c-de5b-425b-b9be-0a2def1592fc\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.108207 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-config\") pod \"5b091e0c-de5b-425b-b9be-0a2def1592fc\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.108303 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wqm4f\" (UniqueName: \"kubernetes.io/projected/5b091e0c-de5b-425b-b9be-0a2def1592fc-kube-api-access-wqm4f\") pod \"5b091e0c-de5b-425b-b9be-0a2def1592fc\" (UID: \"5b091e0c-de5b-425b-b9be-0a2def1592fc\") " Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.113816 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b091e0c-de5b-425b-b9be-0a2def1592fc-kube-api-access-wqm4f" (OuterVolumeSpecName: "kube-api-access-wqm4f") pod "5b091e0c-de5b-425b-b9be-0a2def1592fc" (UID: "5b091e0c-de5b-425b-b9be-0a2def1592fc"). InnerVolumeSpecName "kube-api-access-wqm4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.144830 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-config" (OuterVolumeSpecName: "config") pod "5b091e0c-de5b-425b-b9be-0a2def1592fc" (UID: "5b091e0c-de5b-425b-b9be-0a2def1592fc"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.158983 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5b091e0c-de5b-425b-b9be-0a2def1592fc" (UID: "5b091e0c-de5b-425b-b9be-0a2def1592fc"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.210922 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wqm4f\" (UniqueName: \"kubernetes.io/projected/5b091e0c-de5b-425b-b9be-0a2def1592fc-kube-api-access-wqm4f\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.210998 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.211026 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b091e0c-de5b-425b-b9be-0a2def1592fc-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.919559 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-697np" event={"ID":"5b091e0c-de5b-425b-b9be-0a2def1592fc","Type":"ContainerDied","Data":"fe8a537c27090391210ea30cbfaebe035c1d6fd9459aa7ee243281020f1dcdf8"} Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.919946 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-697np" Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.941887 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-697np"] Mar 20 13:41:40 crc kubenswrapper[4690]: I0320 13:41:40.949137 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-697np"] Mar 20 13:41:41 crc kubenswrapper[4690]: I0320 13:41:41.151352 4690 scope.go:117] "RemoveContainer" containerID="85480b7aa2c368fa1bafb2c91d9856f4bb254d8b559de7c14055da4b0bb3d21a" Mar 20 13:41:42 crc kubenswrapper[4690]: I0320 13:41:42.426661 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b091e0c-de5b-425b-b9be-0a2def1592fc" path="/var/lib/kubelet/pods/5b091e0c-de5b-425b-b9be-0a2def1592fc/volumes" Mar 20 13:41:42 crc kubenswrapper[4690]: E0320 13:41:42.876524 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/openstack-rabbitmq:r42p" Mar 20 13:41:42 crc kubenswrapper[4690]: E0320 13:41:42.876663 4690 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 13:41:42 crc kubenswrapper[4690]: init container &Container{Name:setup-container,Image:quay.io/lmiccini/openstack-rabbitmq:r42p,Command:[sh -c],Args:[set -e Mar 20 13:41:42 crc kubenswrapper[4690]: cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie Mar 20 13:41:42 crc kubenswrapper[4690]: chmod 600 /var/lib/rabbitmq/.erlang.cookie Mar 20 13:41:42 crc kubenswrapper[4690]: cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins Mar 20 13:41:42 crc kubenswrapper[4690]: echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf Mar 20 13:41:42 crc kubenswrapper[4690]: sed -e 's/default_user/username/' -e 
's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf Mar 20 13:41:42 crc kubenswrapper[4690]: chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf Mar 20 13:41:42 crc kubenswrapper[4690]: # Allow time for multi-pod clusters to complete peer discovery Mar 20 13:41:42 crc kubenswrapper[4690]: sleep 30],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h9w6z,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(b6c3ab56-9d3c-431c-a697-d6df19b67a21): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Mar 20 13:41:42 crc kubenswrapper[4690]: > logger="UnhandledError" Mar 20 13:41:42 crc kubenswrapper[4690]: E0320 13:41:42.877741 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="b6c3ab56-9d3c-431c-a697-d6df19b67a21" Mar 20 13:41:42 crc kubenswrapper[4690]: E0320 13:41:42.893312 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/lmiccini/openstack-rabbitmq:r42p" Mar 20 13:41:42 crc kubenswrapper[4690]: E0320 13:41:42.893483 4690 kuberuntime_manager.go:1274] "Unhandled Error" err=< Mar 20 13:41:42 crc kubenswrapper[4690]: init container &Container{Name:setup-container,Image:quay.io/lmiccini/openstack-rabbitmq:r42p,Command:[sh -c],Args:[set -e Mar 20 13:41:42 crc kubenswrapper[4690]: cp /tmp/erlang-cookie-secret/.erlang.cookie 
/var/lib/rabbitmq/.erlang.cookie Mar 20 13:41:42 crc kubenswrapper[4690]: chmod 600 /var/lib/rabbitmq/.erlang.cookie Mar 20 13:41:42 crc kubenswrapper[4690]: cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins Mar 20 13:41:42 crc kubenswrapper[4690]: echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf Mar 20 13:41:42 crc kubenswrapper[4690]: sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf Mar 20 13:41:42 crc kubenswrapper[4690]: chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf Mar 20 13:41:42 crc kubenswrapper[4690]: # Allow time for multi-pod clusters to complete peer discovery Mar 20 13:41:42 crc kubenswrapper[4690]: sleep 30],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-v9x4g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(c9508cc5-d6ca-435f-949a-790440ed5f11): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled Mar 20 13:41:42 crc kubenswrapper[4690]: > logger="UnhandledError" Mar 20 13:41:42 crc kubenswrapper[4690]: E0320 13:41:42.894766 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="c9508cc5-d6ca-435f-949a-790440ed5f11" Mar 20 13:41:42 crc kubenswrapper[4690]: E0320 13:41:42.952481 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling 
image \\\"quay.io/lmiccini/openstack-rabbitmq:r42p\\\"\"" pod="openstack/rabbitmq-server-0" podUID="c9508cc5-d6ca-435f-949a-790440ed5f11" Mar 20 13:41:42 crc kubenswrapper[4690]: E0320 13:41:42.952707 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/lmiccini/openstack-rabbitmq:r42p\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="b6c3ab56-9d3c-431c-a697-d6df19b67a21" Mar 20 13:41:44 crc kubenswrapper[4690]: I0320 13:41:44.177552 4690 scope.go:117] "RemoveContainer" containerID="870c0e87e1c7ad3294d8f10982324c05ca3b6e4002fc0efe822cbc6d1f8ea3c7" Mar 20 13:41:44 crc kubenswrapper[4690]: I0320 13:41:44.959518 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"61fd0b68cc3ec6d77f02280694bc855224d1387694bd1b6f59471ac008b5cb66"} Mar 20 13:41:45 crc kubenswrapper[4690]: E0320 13:41:45.547024 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Mar 20 13:41:45 crc kubenswrapper[4690]: E0320 13:41:45.547425 4690 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Mar 20 13:41:45 crc kubenswrapper[4690]: E0320 13:41:45.547760 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-nnn4t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(d3e883d8-973f-4e69-a13f-175f1904a203): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Mar 20 13:41:45 crc kubenswrapper[4690]: E0320 13:41:45.548973 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openstack/kube-state-metrics-0" podUID="d3e883d8-973f-4e69-a13f-175f1904a203" Mar 20 13:41:45 crc kubenswrapper[4690]: E0320 13:41:45.910498 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ovsdbserver-sb-0" podUID="c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8" Mar 20 13:41:45 crc kubenswrapper[4690]: I0320 13:41:45.967288 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"26e022b9-e7f5-4787-abe8-9967d8f4d11e","Type":"ContainerStarted","Data":"b8b244f84f8ab6d256a95d426ae421d8174c49be15c5c50507663d007bf9c4fd"} Mar 20 13:41:45 crc kubenswrapper[4690]: I0320 13:41:45.969318 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8","Type":"ContainerStarted","Data":"eaf0a184025703a29b2af723468ee488c70fe1d65dd4e0942860f7ee7daa72e6"} Mar 20 13:41:45 crc kubenswrapper[4690]: I0320 13:41:45.970417 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a81f6ca1-a67e-4cbc-99de-32701eccb13b","Type":"ContainerStarted","Data":"708fdada739773ce2ffbdebfa2a92caa23f0398ea70b03cd636d51400417ebb0"} Mar 20 13:41:45 crc kubenswrapper[4690]: I0320 13:41:45.972565 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"ce9abe13-14e4-4ce8-ae28-b52022d16a0e","Type":"ContainerStarted","Data":"84180088df4f6d32472a593af471f78fa2e648ba02b693a8bed45598f6ed2ab6"} Mar 20 13:41:45 crc kubenswrapper[4690]: I0320 13:41:45.972667 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Mar 20 13:41:45 crc kubenswrapper[4690]: I0320 13:41:45.973673 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5a4acc48-2e3c-4b76-b55e-e9152c405f11","Type":"ContainerStarted","Data":"1672827a412e2db2b147eebe93b3a834cab3e1b71ab376757c15e08e2df51304"} Mar 20 13:41:45 crc kubenswrapper[4690]: I0320 
13:41:45.974824 4690 generic.go:334] "Generic (PLEG): container finished" podID="ba9868f8-3baf-4ecd-896c-1497873f32d7" containerID="fb8e18a8f37c173ec2782be7b2ae311f0247ecfc88e38200ee156f47b2da11ae" exitCode=0 Mar 20 13:41:45 crc kubenswrapper[4690]: I0320 13:41:45.974917 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-s6fhs" event={"ID":"ba9868f8-3baf-4ecd-896c-1497873f32d7","Type":"ContainerDied","Data":"fb8e18a8f37c173ec2782be7b2ae311f0247ecfc88e38200ee156f47b2da11ae"} Mar 20 13:41:46 crc kubenswrapper[4690]: E0320 13:41:46.018719 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8" Mar 20 13:41:46 crc kubenswrapper[4690]: E0320 13:41:46.018769 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="d3e883d8-973f-4e69-a13f-175f1904a203" Mar 20 13:41:46 crc kubenswrapper[4690]: I0320 13:41:46.057200 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=15.536812314 podStartE2EDuration="29.057184822s" podCreationTimestamp="2026-03-20 13:41:17 +0000 UTC" firstStartedPulling="2026-03-20 13:41:27.631007828 +0000 UTC m=+1133.920607771" lastFinishedPulling="2026-03-20 13:41:41.151380336 +0000 UTC m=+1147.440980279" observedRunningTime="2026-03-20 13:41:46.034388938 +0000 UTC m=+1152.323988871" watchObservedRunningTime="2026-03-20 13:41:46.057184822 +0000 UTC m=+1152.346784765" Mar 20 13:41:46 crc kubenswrapper[4690]: I0320 13:41:46.983729 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mxmrl" event={"ID":"9e0061bd-d72c-4aeb-86f0-154e0cccfe15","Type":"ContainerStarted","Data":"c8e3a323a4517c263dc4e71fc3bfc59d3f0f4743cd5d7d28201f5dbd0314b742"} Mar 20 13:41:46 crc kubenswrapper[4690]: I0320 13:41:46.984337 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-mxmrl" Mar 20 13:41:46 crc kubenswrapper[4690]: I0320 13:41:46.987072 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-s6fhs" event={"ID":"ba9868f8-3baf-4ecd-896c-1497873f32d7","Type":"ContainerStarted","Data":"b13ada7dc35ba4d4596e5cbbc89f21761282c883c3005b0be95d9542f3b2d85a"} Mar 20 13:41:46 crc kubenswrapper[4690]: I0320 13:41:46.987109 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-s6fhs" event={"ID":"ba9868f8-3baf-4ecd-896c-1497873f32d7","Type":"ContainerStarted","Data":"4df7cfad63767b031c259351b6b09970d9caca8b99268138438a4b035072e2e2"} Mar 20 13:41:46 crc kubenswrapper[4690]: I0320 13:41:46.987146 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:46 crc kubenswrapper[4690]: I0320 13:41:46.987763 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:41:46 crc kubenswrapper[4690]: I0320 13:41:46.989797 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" 
event={"ID":"26e022b9-e7f5-4787-abe8-9967d8f4d11e","Type":"ContainerStarted","Data":"f8386a182669b4455fc755f4ef62c43bc6c3a7386b46bbd3d847c3426c227320"} Mar 20 13:41:46 crc kubenswrapper[4690]: E0320 13:41:46.991528 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovsdbserver-sb\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-ovn-sb-db-server:current-podified\\\"\"" pod="openstack/ovsdbserver-sb-0" podUID="c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8" Mar 20 13:41:47 crc kubenswrapper[4690]: I0320 13:41:47.005655 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-mxmrl" podStartSLOduration=8.866262623 podStartE2EDuration="24.005605929s" podCreationTimestamp="2026-03-20 13:41:23 +0000 UTC" firstStartedPulling="2026-03-20 13:41:27.674915008 +0000 UTC m=+1133.964514951" lastFinishedPulling="2026-03-20 13:41:42.814258314 +0000 UTC m=+1149.103858257" observedRunningTime="2026-03-20 13:41:47.003997492 +0000 UTC m=+1153.293597475" watchObservedRunningTime="2026-03-20 13:41:47.005605929 +0000 UTC m=+1153.295205902" Mar 20 13:41:47 crc kubenswrapper[4690]: I0320 13:41:47.027888 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=8.939525574 podStartE2EDuration="22.027847957s" podCreationTimestamp="2026-03-20 13:41:25 +0000 UTC" firstStartedPulling="2026-03-20 13:41:28.07791976 +0000 UTC m=+1134.367519703" lastFinishedPulling="2026-03-20 13:41:41.166242133 +0000 UTC m=+1147.455842086" observedRunningTime="2026-03-20 13:41:47.025773867 +0000 UTC m=+1153.315373820" watchObservedRunningTime="2026-03-20 13:41:47.027847957 +0000 UTC m=+1153.317447940" Mar 20 13:41:47 crc kubenswrapper[4690]: I0320 13:41:47.078821 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-s6fhs" podStartSLOduration=10.666618745 podStartE2EDuration="24.078800709s" podCreationTimestamp="2026-03-20 13:41:23 +0000 UTC" firstStartedPulling="2026-03-20 13:41:27.753577255 +0000 UTC m=+1134.043177198" lastFinishedPulling="2026-03-20 13:41:41.165759219 +0000 UTC m=+1147.455359162" observedRunningTime="2026-03-20 13:41:47.056752046 +0000 UTC m=+1153.346351999" watchObservedRunningTime="2026-03-20 13:41:47.078800709 +0000 UTC m=+1153.368400662" Mar 20 13:41:47 crc kubenswrapper[4690]: I0320 13:41:47.437698 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:48 crc kubenswrapper[4690]: I0320 13:41:48.457925 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:48 crc kubenswrapper[4690]: I0320 13:41:48.490921 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.025305 4690 generic.go:334] "Generic (PLEG): container finished" podID="a81f6ca1-a67e-4cbc-99de-32701eccb13b" containerID="708fdada739773ce2ffbdebfa2a92caa23f0398ea70b03cd636d51400417ebb0" exitCode=0 Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.025425 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a81f6ca1-a67e-4cbc-99de-32701eccb13b","Type":"ContainerDied","Data":"708fdada739773ce2ffbdebfa2a92caa23f0398ea70b03cd636d51400417ebb0"} Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.027828 4690 
generic.go:334] "Generic (PLEG): container finished" podID="5a4acc48-2e3c-4b76-b55e-e9152c405f11" containerID="1672827a412e2db2b147eebe93b3a834cab3e1b71ab376757c15e08e2df51304" exitCode=0 Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.027946 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5a4acc48-2e3c-4b76-b55e-e9152c405f11","Type":"ContainerDied","Data":"1672827a412e2db2b147eebe93b3a834cab3e1b71ab376757c15e08e2df51304"} Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.084333 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.355215 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-868wr"] Mar 20 13:41:50 crc kubenswrapper[4690]: E0320 13:41:50.355530 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerName="init" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.355550 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerName="init" Mar 20 13:41:50 crc kubenswrapper[4690]: E0320 13:41:50.355564 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerName="dnsmasq-dns" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.355571 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerName="dnsmasq-dns" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.355721 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b091e0c-de5b-425b-b9be-0a2def1592fc" containerName="dnsmasq-dns" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.356411 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.358199 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.368429 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-868wr"] Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.402482 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-f2m7w"] Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.403653 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.411485 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.442181 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-f2m7w"] Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.501829 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.502649 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ljsp\" (UniqueName: \"kubernetes.io/projected/9b79858b-d182-4740-aba6-ed9fb668f22c-kube-api-access-2ljsp\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.502703 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-config\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.502778 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.609328 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/db48a1e3-3cbe-4b9c-b68f-92a011543076-ovs-rundir\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.609800 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db48a1e3-3cbe-4b9c-b68f-92a011543076-config\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.609837 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db48a1e3-3cbe-4b9c-b68f-92a011543076-combined-ca-bundle\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.609983 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/db48a1e3-3cbe-4b9c-b68f-92a011543076-ovn-rundir\") pod \"ovn-controller-metrics-f2m7w\" (UID: 
\"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.610103 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p8xtv\" (UniqueName: \"kubernetes.io/projected/db48a1e3-3cbe-4b9c-b68f-92a011543076-kube-api-access-p8xtv\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.610145 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.610187 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ljsp\" (UniqueName: \"kubernetes.io/projected/9b79858b-d182-4740-aba6-ed9fb668f22c-kube-api-access-2ljsp\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.610237 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-config\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.610267 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db48a1e3-3cbe-4b9c-b68f-92a011543076-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.610311 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.611435 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-ovsdbserver-nb\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.614279 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-dns-svc\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.614418 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-config\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " 
pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.636412 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ljsp\" (UniqueName: \"kubernetes.io/projected/9b79858b-d182-4740-aba6-ed9fb668f22c-kube-api-access-2ljsp\") pod \"dnsmasq-dns-5bf47b49b7-868wr\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.670601 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.709156 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-868wr"] Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.712116 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/db48a1e3-3cbe-4b9c-b68f-92a011543076-ovs-rundir\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.712176 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db48a1e3-3cbe-4b9c-b68f-92a011543076-config\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.712203 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db48a1e3-3cbe-4b9c-b68f-92a011543076-combined-ca-bundle\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.712228 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/db48a1e3-3cbe-4b9c-b68f-92a011543076-ovn-rundir\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.712284 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p8xtv\" (UniqueName: \"kubernetes.io/projected/db48a1e3-3cbe-4b9c-b68f-92a011543076-kube-api-access-p8xtv\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.712334 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db48a1e3-3cbe-4b9c-b68f-92a011543076-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.712892 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/db48a1e3-3cbe-4b9c-b68f-92a011543076-ovn-rundir\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 
13:41:50.713212 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/db48a1e3-3cbe-4b9c-b68f-92a011543076-config\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.713318 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/db48a1e3-3cbe-4b9c-b68f-92a011543076-ovs-rundir\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.724937 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/db48a1e3-3cbe-4b9c-b68f-92a011543076-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.728567 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db48a1e3-3cbe-4b9c-b68f-92a011543076-combined-ca-bundle\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.730133 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-xtj9s"] Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.732001 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.735608 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.737364 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p8xtv\" (UniqueName: \"kubernetes.io/projected/db48a1e3-3cbe-4b9c-b68f-92a011543076-kube-api-access-p8xtv\") pod \"ovn-controller-metrics-f2m7w\" (UID: \"db48a1e3-3cbe-4b9c-b68f-92a011543076\") " pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.741741 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-xtj9s"] Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.813387 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-config\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.813636 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.813745 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-dns-svc\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.813781 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkw27\" (UniqueName: \"kubernetes.io/projected/bf577129-6184-4843-9b9d-7a5fa9dd5088-kube-api-access-zkw27\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.813810 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.915267 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-dns-svc\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.915313 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkw27\" (UniqueName: \"kubernetes.io/projected/bf577129-6184-4843-9b9d-7a5fa9dd5088-kube-api-access-zkw27\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.915332 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.915387 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-config\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.915404 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.916207 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.916680 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-dns-svc\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.917621 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-config\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.918069 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:50 crc kubenswrapper[4690]: I0320 13:41:50.937998 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkw27\" (UniqueName: \"kubernetes.io/projected/bf577129-6184-4843-9b9d-7a5fa9dd5088-kube-api-access-zkw27\") pod \"dnsmasq-dns-8554648995-xtj9s\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.031057 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-f2m7w" Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.038799 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"a81f6ca1-a67e-4cbc-99de-32701eccb13b","Type":"ContainerStarted","Data":"84968102a150e4d07b24c3fc51e5f1d1d441065b7573ed4ff18caa8d091ac744"} Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.041277 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"5a4acc48-2e3c-4b76-b55e-e9152c405f11","Type":"ContainerStarted","Data":"e93a2e1887e41c59249ce536069f23b67fdf4788a7ff30e24b25b1480089c1c7"} Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.050990 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.092393 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=19.699029948 podStartE2EDuration="35.092369252s" podCreationTimestamp="2026-03-20 13:41:16 +0000 UTC" firstStartedPulling="2026-03-20 13:41:27.525738977 +0000 UTC m=+1133.815338920" lastFinishedPulling="2026-03-20 13:41:42.919078281 +0000 UTC m=+1149.208678224" observedRunningTime="2026-03-20 13:41:51.087298837 +0000 UTC m=+1157.376898790" watchObservedRunningTime="2026-03-20 13:41:51.092369252 +0000 UTC m=+1157.381969195" Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.130054 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=23.302955014 podStartE2EDuration="37.130034933s" podCreationTimestamp="2026-03-20 13:41:14 +0000 UTC" firstStartedPulling="2026-03-20 13:41:27.324258736 +0000 UTC m=+1133.613858679" lastFinishedPulling="2026-03-20 13:41:41.151338655 +0000 UTC m=+1147.440938598" observedRunningTime="2026-03-20 13:41:51.128002705 +0000 UTC m=+1157.417602658" watchObservedRunningTime="2026-03-20 13:41:51.130034933 +0000 UTC m=+1157.419634876" Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.208812 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-868wr"] Mar 20 13:41:51 crc kubenswrapper[4690]: W0320 13:41:51.209963 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9b79858b_d182_4740_aba6_ed9fb668f22c.slice/crio-4402731b77384f263a8b2a573a5b7720b9d0f1ac1e6e744defa33e63cca70eae WatchSource:0}: Error finding container 4402731b77384f263a8b2a573a5b7720b9d0f1ac1e6e744defa33e63cca70eae: Status 404 returned error can't find the container with id 4402731b77384f263a8b2a573a5b7720b9d0f1ac1e6e744defa33e63cca70eae Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.682274 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-xtj9s"] Mar 20 13:41:51 crc kubenswrapper[4690]: W0320 13:41:51.694551 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbf577129_6184_4843_9b9d_7a5fa9dd5088.slice/crio-fe623659ec8ae4accc0a445ab5578ecaefd8df0291bbd63c87bdc3e3eecb26cf WatchSource:0}: Error finding container fe623659ec8ae4accc0a445ab5578ecaefd8df0291bbd63c87bdc3e3eecb26cf: Status 404 returned error can't find the container with id fe623659ec8ae4accc0a445ab5578ecaefd8df0291bbd63c87bdc3e3eecb26cf Mar 20 13:41:51 crc kubenswrapper[4690]: I0320 13:41:51.702103 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-f2m7w"] Mar 20 13:41:51 crc kubenswrapper[4690]: W0320 13:41:51.704009 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddb48a1e3_3cbe_4b9c_b68f_92a011543076.slice/crio-71d0c3e8269a96275a48691a0bfb8d9e269b422a2eee78760fea4297e0616003 WatchSource:0}: Error finding container 71d0c3e8269a96275a48691a0bfb8d9e269b422a2eee78760fea4297e0616003: Status 404 returned error can't find the container with id 71d0c3e8269a96275a48691a0bfb8d9e269b422a2eee78760fea4297e0616003 Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.052250 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/ovn-controller-metrics-f2m7w" event={"ID":"db48a1e3-3cbe-4b9c-b68f-92a011543076","Type":"ContainerStarted","Data":"a549c6ea5c2d1311944e3b165cfde382d29cbea2773a7403cb3405daeeada84d"} Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.052639 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-f2m7w" event={"ID":"db48a1e3-3cbe-4b9c-b68f-92a011543076","Type":"ContainerStarted","Data":"71d0c3e8269a96275a48691a0bfb8d9e269b422a2eee78760fea4297e0616003"} Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.053706 4690 generic.go:334] "Generic (PLEG): container finished" podID="9b79858b-d182-4740-aba6-ed9fb668f22c" containerID="828ee2b3d4473a783462956e0ae4904712d3ac05bf3b9ce54285880eff24111b" exitCode=0 Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.053777 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" event={"ID":"9b79858b-d182-4740-aba6-ed9fb668f22c","Type":"ContainerDied","Data":"828ee2b3d4473a783462956e0ae4904712d3ac05bf3b9ce54285880eff24111b"} Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.053803 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" event={"ID":"9b79858b-d182-4740-aba6-ed9fb668f22c","Type":"ContainerStarted","Data":"4402731b77384f263a8b2a573a5b7720b9d0f1ac1e6e744defa33e63cca70eae"} Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.057799 4690 generic.go:334] "Generic (PLEG): container finished" podID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerID="2a446c7c379728d0065b842473cd79b04ce32c5675052987db2e92b88636aeab" exitCode=0 Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.057879 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-xtj9s" event={"ID":"bf577129-6184-4843-9b9d-7a5fa9dd5088","Type":"ContainerDied","Data":"2a446c7c379728d0065b842473cd79b04ce32c5675052987db2e92b88636aeab"} Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.057912 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-xtj9s" event={"ID":"bf577129-6184-4843-9b9d-7a5fa9dd5088","Type":"ContainerStarted","Data":"fe623659ec8ae4accc0a445ab5578ecaefd8df0291bbd63c87bdc3e3eecb26cf"} Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.082739 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-f2m7w" podStartSLOduration=2.082718462 podStartE2EDuration="2.082718462s" podCreationTimestamp="2026-03-20 13:41:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:41:52.074745523 +0000 UTC m=+1158.364345466" watchObservedRunningTime="2026-03-20 13:41:52.082718462 +0000 UTC m=+1158.372318395" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.374216 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.561228 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-config\") pod \"9b79858b-d182-4740-aba6-ed9fb668f22c\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.561292 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-dns-svc\") pod \"9b79858b-d182-4740-aba6-ed9fb668f22c\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.561464 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ljsp\" (UniqueName: \"kubernetes.io/projected/9b79858b-d182-4740-aba6-ed9fb668f22c-kube-api-access-2ljsp\") pod \"9b79858b-d182-4740-aba6-ed9fb668f22c\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.561498 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-ovsdbserver-nb\") pod \"9b79858b-d182-4740-aba6-ed9fb668f22c\" (UID: \"9b79858b-d182-4740-aba6-ed9fb668f22c\") " Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.566551 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9b79858b-d182-4740-aba6-ed9fb668f22c-kube-api-access-2ljsp" (OuterVolumeSpecName: "kube-api-access-2ljsp") pod "9b79858b-d182-4740-aba6-ed9fb668f22c" (UID: "9b79858b-d182-4740-aba6-ed9fb668f22c"). InnerVolumeSpecName "kube-api-access-2ljsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.579995 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9b79858b-d182-4740-aba6-ed9fb668f22c" (UID: "9b79858b-d182-4740-aba6-ed9fb668f22c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.591248 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-config" (OuterVolumeSpecName: "config") pod "9b79858b-d182-4740-aba6-ed9fb668f22c" (UID: "9b79858b-d182-4740-aba6-ed9fb668f22c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.605773 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9b79858b-d182-4740-aba6-ed9fb668f22c" (UID: "9b79858b-d182-4740-aba6-ed9fb668f22c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.663753 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.663998 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.664073 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ljsp\" (UniqueName: \"kubernetes.io/projected/9b79858b-d182-4740-aba6-ed9fb668f22c-kube-api-access-2ljsp\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:52 crc kubenswrapper[4690]: I0320 13:41:52.664130 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9b79858b-d182-4740-aba6-ed9fb668f22c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.066413 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-xtj9s" event={"ID":"bf577129-6184-4843-9b9d-7a5fa9dd5088","Type":"ContainerStarted","Data":"c44ff7644150eb309cd029aeca83d66520292aa37e28f894f5c46dbd4972faa7"} Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.067437 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.071346 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" event={"ID":"9b79858b-d182-4740-aba6-ed9fb668f22c","Type":"ContainerDied","Data":"4402731b77384f263a8b2a573a5b7720b9d0f1ac1e6e744defa33e63cca70eae"} Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.071400 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5bf47b49b7-868wr" Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.071438 4690 scope.go:117] "RemoveContainer" containerID="828ee2b3d4473a783462956e0ae4904712d3ac05bf3b9ce54285880eff24111b" Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.126903 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-xtj9s" podStartSLOduration=3.126835513 podStartE2EDuration="3.126835513s" podCreationTimestamp="2026-03-20 13:41:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:41:53.083053746 +0000 UTC m=+1159.372653689" watchObservedRunningTime="2026-03-20 13:41:53.126835513 +0000 UTC m=+1159.416435456" Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.179667 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-868wr"] Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.187078 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5bf47b49b7-868wr"] Mar 20 13:41:53 crc kubenswrapper[4690]: I0320 13:41:53.200323 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Mar 20 13:41:54 crc kubenswrapper[4690]: I0320 13:41:54.429678 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9b79858b-d182-4740-aba6-ed9fb668f22c" path="/var/lib/kubelet/pods/9b79858b-d182-4740-aba6-ed9fb668f22c/volumes" Mar 20 13:41:56 crc kubenswrapper[4690]: I0320 13:41:56.328787 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Mar 20 13:41:56 crc kubenswrapper[4690]: I0320 13:41:56.329091 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Mar 20 13:41:56 crc kubenswrapper[4690]: I0320 13:41:56.424537 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Mar 20 13:41:57 crc kubenswrapper[4690]: I0320 13:41:57.229767 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Mar 20 13:41:57 crc kubenswrapper[4690]: I0320 13:41:57.876388 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:57 crc kubenswrapper[4690]: I0320 13:41:57.876542 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:57 crc kubenswrapper[4690]: I0320 13:41:57.954151 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:58 crc kubenswrapper[4690]: I0320 13:41:58.186482 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.371994 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-008d-account-create-update-mw8ch"] Mar 20 13:41:59 crc kubenswrapper[4690]: E0320 13:41:59.372807 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9b79858b-d182-4740-aba6-ed9fb668f22c" containerName="init" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.372820 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9b79858b-d182-4740-aba6-ed9fb668f22c" containerName="init" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 
13:41:59.372992 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="9b79858b-d182-4740-aba6-ed9fb668f22c" containerName="init" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.373486 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.375422 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.380165 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-2dbfj"] Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.382355 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2dbfj" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.389797 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-008d-account-create-update-mw8ch"] Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.415024 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2dbfj"] Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.471334 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a16960c-b84b-4b25-b51a-9f5dad54e473-operator-scripts\") pod \"keystone-008d-account-create-update-mw8ch\" (UID: \"0a16960c-b84b-4b25-b51a-9f5dad54e473\") " pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.471649 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk7tj\" (UniqueName: \"kubernetes.io/projected/0a16960c-b84b-4b25-b51a-9f5dad54e473-kube-api-access-dk7tj\") pod \"keystone-008d-account-create-update-mw8ch\" (UID: \"0a16960c-b84b-4b25-b51a-9f5dad54e473\") " pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.471815 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjnkb\" (UniqueName: \"kubernetes.io/projected/c82e598b-5f84-4e68-aa8f-5682574fcae9-kube-api-access-cjnkb\") pod \"keystone-db-create-2dbfj\" (UID: \"c82e598b-5f84-4e68-aa8f-5682574fcae9\") " pod="openstack/keystone-db-create-2dbfj" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.471935 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c82e598b-5f84-4e68-aa8f-5682574fcae9-operator-scripts\") pod \"keystone-db-create-2dbfj\" (UID: \"c82e598b-5f84-4e68-aa8f-5682574fcae9\") " pod="openstack/keystone-db-create-2dbfj" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.473932 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-27tlv"] Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.475060 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-27tlv" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.482143 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-27tlv"] Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.553326 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-899c-account-create-update-pd277"] Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.554574 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.558255 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.573416 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a16960c-b84b-4b25-b51a-9f5dad54e473-operator-scripts\") pod \"keystone-008d-account-create-update-mw8ch\" (UID: \"0a16960c-b84b-4b25-b51a-9f5dad54e473\") " pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.573488 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pn8r5\" (UniqueName: \"kubernetes.io/projected/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-kube-api-access-pn8r5\") pod \"placement-db-create-27tlv\" (UID: \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\") " pod="openstack/placement-db-create-27tlv" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.573514 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-operator-scripts\") pod \"placement-db-create-27tlv\" (UID: \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\") " pod="openstack/placement-db-create-27tlv" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.573545 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk7tj\" (UniqueName: \"kubernetes.io/projected/0a16960c-b84b-4b25-b51a-9f5dad54e473-kube-api-access-dk7tj\") pod \"keystone-008d-account-create-update-mw8ch\" (UID: \"0a16960c-b84b-4b25-b51a-9f5dad54e473\") " pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.573604 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-operator-scripts\") pod \"placement-899c-account-create-update-pd277\" (UID: \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\") " pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.573626 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjnkb\" (UniqueName: \"kubernetes.io/projected/c82e598b-5f84-4e68-aa8f-5682574fcae9-kube-api-access-cjnkb\") pod \"keystone-db-create-2dbfj\" (UID: \"c82e598b-5f84-4e68-aa8f-5682574fcae9\") " pod="openstack/keystone-db-create-2dbfj" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.573652 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c82e598b-5f84-4e68-aa8f-5682574fcae9-operator-scripts\") pod 
\"keystone-db-create-2dbfj\" (UID: \"c82e598b-5f84-4e68-aa8f-5682574fcae9\") " pod="openstack/keystone-db-create-2dbfj" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.573710 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsw4r\" (UniqueName: \"kubernetes.io/projected/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-kube-api-access-fsw4r\") pod \"placement-899c-account-create-update-pd277\" (UID: \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\") " pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.574670 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a16960c-b84b-4b25-b51a-9f5dad54e473-operator-scripts\") pod \"keystone-008d-account-create-update-mw8ch\" (UID: \"0a16960c-b84b-4b25-b51a-9f5dad54e473\") " pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.575816 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c82e598b-5f84-4e68-aa8f-5682574fcae9-operator-scripts\") pod \"keystone-db-create-2dbfj\" (UID: \"c82e598b-5f84-4e68-aa8f-5682574fcae9\") " pod="openstack/keystone-db-create-2dbfj" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.576177 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-899c-account-create-update-pd277"] Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.595922 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjnkb\" (UniqueName: \"kubernetes.io/projected/c82e598b-5f84-4e68-aa8f-5682574fcae9-kube-api-access-cjnkb\") pod \"keystone-db-create-2dbfj\" (UID: \"c82e598b-5f84-4e68-aa8f-5682574fcae9\") " pod="openstack/keystone-db-create-2dbfj" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.596146 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk7tj\" (UniqueName: \"kubernetes.io/projected/0a16960c-b84b-4b25-b51a-9f5dad54e473-kube-api-access-dk7tj\") pod \"keystone-008d-account-create-update-mw8ch\" (UID: \"0a16960c-b84b-4b25-b51a-9f5dad54e473\") " pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.674577 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-operator-scripts\") pod \"placement-899c-account-create-update-pd277\" (UID: \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\") " pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.674672 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsw4r\" (UniqueName: \"kubernetes.io/projected/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-kube-api-access-fsw4r\") pod \"placement-899c-account-create-update-pd277\" (UID: \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\") " pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.674725 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pn8r5\" (UniqueName: \"kubernetes.io/projected/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-kube-api-access-pn8r5\") pod \"placement-db-create-27tlv\" (UID: \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\") " 
pod="openstack/placement-db-create-27tlv" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.674753 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-operator-scripts\") pod \"placement-db-create-27tlv\" (UID: \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\") " pod="openstack/placement-db-create-27tlv" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.675705 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-operator-scripts\") pod \"placement-db-create-27tlv\" (UID: \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\") " pod="openstack/placement-db-create-27tlv" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.676314 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-operator-scripts\") pod \"placement-899c-account-create-update-pd277\" (UID: \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\") " pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.696915 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsw4r\" (UniqueName: \"kubernetes.io/projected/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-kube-api-access-fsw4r\") pod \"placement-899c-account-create-update-pd277\" (UID: \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\") " pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.697538 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pn8r5\" (UniqueName: \"kubernetes.io/projected/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-kube-api-access-pn8r5\") pod \"placement-db-create-27tlv\" (UID: \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\") " pod="openstack/placement-db-create-27tlv" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.707192 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.720234 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-2dbfj" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.793241 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-27tlv" Mar 20 13:41:59 crc kubenswrapper[4690]: I0320 13:41:59.884352 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.136106 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566902-9qjq2"] Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.137288 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566902-9qjq2" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.139916 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.141392 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.144072 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.150483 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566902-9qjq2"] Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.172412 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-008d-account-create-update-mw8ch"] Mar 20 13:42:00 crc kubenswrapper[4690]: W0320 13:42:00.173728 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0a16960c_b84b_4b25_b51a_9f5dad54e473.slice/crio-d3416a73a0f7ce21ef814a2a0d6bd8e19a5b8ed2064e5d2813c2ceefe57ca298 WatchSource:0}: Error finding container d3416a73a0f7ce21ef814a2a0d6bd8e19a5b8ed2064e5d2813c2ceefe57ca298: Status 404 returned error can't find the container with id d3416a73a0f7ce21ef814a2a0d6bd8e19a5b8ed2064e5d2813c2ceefe57ca298 Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.181239 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n9776\" (UniqueName: \"kubernetes.io/projected/19081a20-821d-49bd-abd4-7788cab48b2d-kube-api-access-n9776\") pod \"auto-csr-approver-29566902-9qjq2\" (UID: \"19081a20-821d-49bd-abd4-7788cab48b2d\") " pod="openshift-infra/auto-csr-approver-29566902-9qjq2" Mar 20 13:42:00 crc kubenswrapper[4690]: W0320 13:42:00.260103 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc82e598b_5f84_4e68_aa8f_5682574fcae9.slice/crio-617f85c8c50829cc7ccd7080a263a5a8c03f5164c085aba56d41688d1cb7a816 WatchSource:0}: Error finding container 617f85c8c50829cc7ccd7080a263a5a8c03f5164c085aba56d41688d1cb7a816: Status 404 returned error can't find the container with id 617f85c8c50829cc7ccd7080a263a5a8c03f5164c085aba56d41688d1cb7a816 Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.260575 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-2dbfj"] Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.282372 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n9776\" (UniqueName: \"kubernetes.io/projected/19081a20-821d-49bd-abd4-7788cab48b2d-kube-api-access-n9776\") pod \"auto-csr-approver-29566902-9qjq2\" (UID: \"19081a20-821d-49bd-abd4-7788cab48b2d\") " pod="openshift-infra/auto-csr-approver-29566902-9qjq2" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.302369 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n9776\" (UniqueName: \"kubernetes.io/projected/19081a20-821d-49bd-abd4-7788cab48b2d-kube-api-access-n9776\") pod \"auto-csr-approver-29566902-9qjq2\" (UID: \"19081a20-821d-49bd-abd4-7788cab48b2d\") " pod="openshift-infra/auto-csr-approver-29566902-9qjq2" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.349914 4690 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-27tlv"] Mar 20 13:42:00 crc kubenswrapper[4690]: W0320 13:42:00.352704 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd1f9d0e8_10b6_4aa6_ae3a_890f9e521253.slice/crio-12bf28eec0ec83ebf2cb4f2acb279b58f7917310d598be18410bbcfd0e2bf17e WatchSource:0}: Error finding container 12bf28eec0ec83ebf2cb4f2acb279b58f7917310d598be18410bbcfd0e2bf17e: Status 404 returned error can't find the container with id 12bf28eec0ec83ebf2cb4f2acb279b58f7917310d598be18410bbcfd0e2bf17e Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.424816 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-899c-account-create-update-pd277"] Mar 20 13:42:00 crc kubenswrapper[4690]: W0320 13:42:00.433093 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcc9e0393_9cc0_4120_8661_31fc5e0a77f6.slice/crio-ed93eb447c991e8898875a21353534c8061fef1dc4c15f604224411e8b5758f0 WatchSource:0}: Error finding container ed93eb447c991e8898875a21353534c8061fef1dc4c15f604224411e8b5758f0: Status 404 returned error can't find the container with id ed93eb447c991e8898875a21353534c8061fef1dc4c15f604224411e8b5758f0 Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.459612 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566902-9qjq2" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.585610 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-xtj9s"] Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.586598 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-xtj9s" podUID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerName="dnsmasq-dns" containerID="cri-o://c44ff7644150eb309cd029aeca83d66520292aa37e28f894f5c46dbd4972faa7" gracePeriod=10 Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.588331 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.677506 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-2cfpf"] Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.694601 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-2cfpf"] Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.694726 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.810691 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvprh\" (UniqueName: \"kubernetes.io/projected/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-kube-api-access-xvprh\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.810846 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.810911 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.810937 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.810971 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-config\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.912902 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvprh\" (UniqueName: \"kubernetes.io/projected/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-kube-api-access-xvprh\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.913010 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.913046 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.913103 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: 
\"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.913131 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-config\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.914762 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.914872 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.914980 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-config\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.916187 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:00 crc kubenswrapper[4690]: I0320 13:42:00.939670 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvprh\" (UniqueName: \"kubernetes.io/projected/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-kube-api-access-xvprh\") pod \"dnsmasq-dns-b8fbc5445-2cfpf\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.052283 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-xtj9s" podUID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.058246 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.140814 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c9508cc5-d6ca-435f-949a-790440ed5f11","Type":"ContainerStarted","Data":"32923691aec687dd148bd0f2913887a5203003bde19d6d7cce2ef3d71a4d0f58"} Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.142196 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-27tlv" event={"ID":"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253","Type":"ContainerStarted","Data":"12bf28eec0ec83ebf2cb4f2acb279b58f7917310d598be18410bbcfd0e2bf17e"} Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.143360 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2dbfj" event={"ID":"c82e598b-5f84-4e68-aa8f-5682574fcae9","Type":"ContainerStarted","Data":"617f85c8c50829cc7ccd7080a263a5a8c03f5164c085aba56d41688d1cb7a816"} Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.144577 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-899c-account-create-update-pd277" event={"ID":"cc9e0393-9cc0-4120-8661-31fc5e0a77f6","Type":"ContainerStarted","Data":"ed93eb447c991e8898875a21353534c8061fef1dc4c15f604224411e8b5758f0"} Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.145415 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-008d-account-create-update-mw8ch" event={"ID":"0a16960c-b84b-4b25-b51a-9f5dad54e473","Type":"ContainerStarted","Data":"d3416a73a0f7ce21ef814a2a0d6bd8e19a5b8ed2064e5d2813c2ceefe57ca298"} Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.292477 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566902-9qjq2"] Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.302862 4690 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.573527 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-2cfpf"] Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.838403 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.848015 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.850135 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.850502 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.850609 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.850740 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-bmtv5" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.874185 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.931535 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jdng9\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-kube-api-access-jdng9\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.931575 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.931594 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-lock\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.931615 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.931636 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:01 crc kubenswrapper[4690]: I0320 13:42:01.931704 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-cache\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.035514 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-cache\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.035583 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-jdng9\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-kube-api-access-jdng9\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.035602 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.035619 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-lock\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.035645 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.035666 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: E0320 13:42:02.035816 4690 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Mar 20 13:42:02 crc kubenswrapper[4690]: E0320 13:42:02.035829 4690 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Mar 20 13:42:02 crc kubenswrapper[4690]: E0320 13:42:02.035909 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift podName:3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a nodeName:}" failed. No retries permitted until 2026-03-20 13:42:02.535892118 +0000 UTC m=+1168.825492061 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift") pod "swift-storage-0" (UID: "3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a") : configmap "swift-ring-files" not found Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.036686 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.036939 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-cache\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.036694 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-lock\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.043582 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-combined-ca-bundle\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.052866 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jdng9\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-kube-api-access-jdng9\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.073084 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.153798 4690 generic.go:334] "Generic (PLEG): container finished" podID="d1f9d0e8-10b6-4aa6-ae3a-890f9e521253" containerID="5e06537acde668849b70cae7ff14df88ac5b57ab12328030c1622dacee40a743" exitCode=0 Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.153895 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-27tlv" event={"ID":"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253","Type":"ContainerDied","Data":"5e06537acde668849b70cae7ff14df88ac5b57ab12328030c1622dacee40a743"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.155578 4690 generic.go:334] "Generic (PLEG): container finished" podID="c82e598b-5f84-4e68-aa8f-5682574fcae9" containerID="29992538275ba381fbefd3befbca7b0eba29ec6b5dd30f5755ceb43330847122" exitCode=0 Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.155624 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2dbfj" event={"ID":"c82e598b-5f84-4e68-aa8f-5682574fcae9","Type":"ContainerDied","Data":"29992538275ba381fbefd3befbca7b0eba29ec6b5dd30f5755ceb43330847122"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.158568 
4690 generic.go:334] "Generic (PLEG): container finished" podID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerID="c44ff7644150eb309cd029aeca83d66520292aa37e28f894f5c46dbd4972faa7" exitCode=0 Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.158629 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-xtj9s" event={"ID":"bf577129-6184-4843-9b9d-7a5fa9dd5088","Type":"ContainerDied","Data":"c44ff7644150eb309cd029aeca83d66520292aa37e28f894f5c46dbd4972faa7"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.160659 4690 generic.go:334] "Generic (PLEG): container finished" podID="cc9e0393-9cc0-4120-8661-31fc5e0a77f6" containerID="eb111293e600d9a3624e22f5aa97930efc27c52255ef7dcb28e16000e9de567a" exitCode=0 Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.160704 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-899c-account-create-update-pd277" event={"ID":"cc9e0393-9cc0-4120-8661-31fc5e0a77f6","Type":"ContainerDied","Data":"eb111293e600d9a3624e22f5aa97930efc27c52255ef7dcb28e16000e9de567a"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.162549 4690 generic.go:334] "Generic (PLEG): container finished" podID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" containerID="8dc8d7a1885f0b543a5fbd2aa653f921c59ee6a7c27ec1799c8730f9450f2d18" exitCode=0 Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.162606 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" event={"ID":"a05f19fb-0bd7-418d-bf6d-1dec04ed9529","Type":"ContainerDied","Data":"8dc8d7a1885f0b543a5fbd2aa653f921c59ee6a7c27ec1799c8730f9450f2d18"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.162633 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" event={"ID":"a05f19fb-0bd7-418d-bf6d-1dec04ed9529","Type":"ContainerStarted","Data":"c8f281fb1d4e57267455ab0a80d77698e56c24632aaa4ca4cfb12db00f950240"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.168977 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b6c3ab56-9d3c-431c-a697-d6df19b67a21","Type":"ContainerStarted","Data":"4a558a53d5a7c50e845544545240ecc9aa85af75ed3990048851148c43c08581"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.173931 4690 generic.go:334] "Generic (PLEG): container finished" podID="0a16960c-b84b-4b25-b51a-9f5dad54e473" containerID="787fc292eebb92aeea979bdffb348af9a0800bbeffe4f971a739a369a6d012f4" exitCode=0 Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.174002 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-008d-account-create-update-mw8ch" event={"ID":"0a16960c-b84b-4b25-b51a-9f5dad54e473","Type":"ContainerDied","Data":"787fc292eebb92aeea979bdffb348af9a0800bbeffe4f971a739a369a6d012f4"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.176084 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566902-9qjq2" event={"ID":"19081a20-821d-49bd-abd4-7788cab48b2d","Type":"ContainerStarted","Data":"05356dc6a7a88340e79c326625b24a7c6a637a9ce714490c64f29bc26b40d284"} Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.327586 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.338665 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-config\") pod \"bf577129-6184-4843-9b9d-7a5fa9dd5088\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.338731 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-dns-svc\") pod \"bf577129-6184-4843-9b9d-7a5fa9dd5088\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.338756 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkw27\" (UniqueName: \"kubernetes.io/projected/bf577129-6184-4843-9b9d-7a5fa9dd5088-kube-api-access-zkw27\") pod \"bf577129-6184-4843-9b9d-7a5fa9dd5088\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.338916 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-sb\") pod \"bf577129-6184-4843-9b9d-7a5fa9dd5088\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.338988 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-nb\") pod \"bf577129-6184-4843-9b9d-7a5fa9dd5088\" (UID: \"bf577129-6184-4843-9b9d-7a5fa9dd5088\") " Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.347157 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf577129-6184-4843-9b9d-7a5fa9dd5088-kube-api-access-zkw27" (OuterVolumeSpecName: "kube-api-access-zkw27") pod "bf577129-6184-4843-9b9d-7a5fa9dd5088" (UID: "bf577129-6184-4843-9b9d-7a5fa9dd5088"). InnerVolumeSpecName "kube-api-access-zkw27". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.428889 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bf577129-6184-4843-9b9d-7a5fa9dd5088" (UID: "bf577129-6184-4843-9b9d-7a5fa9dd5088"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.437410 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-config" (OuterVolumeSpecName: "config") pod "bf577129-6184-4843-9b9d-7a5fa9dd5088" (UID: "bf577129-6184-4843-9b9d-7a5fa9dd5088"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.439745 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bf577129-6184-4843-9b9d-7a5fa9dd5088" (UID: "bf577129-6184-4843-9b9d-7a5fa9dd5088"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.440892 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bf577129-6184-4843-9b9d-7a5fa9dd5088" (UID: "bf577129-6184-4843-9b9d-7a5fa9dd5088"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.441264 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.441304 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.441317 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.441333 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkw27\" (UniqueName: \"kubernetes.io/projected/bf577129-6184-4843-9b9d-7a5fa9dd5088-kube-api-access-zkw27\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.441348 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bf577129-6184-4843-9b9d-7a5fa9dd5088-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:02 crc kubenswrapper[4690]: I0320 13:42:02.543102 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:02 crc kubenswrapper[4690]: E0320 13:42:02.543352 4690 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Mar 20 13:42:02 crc kubenswrapper[4690]: E0320 13:42:02.543389 4690 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Mar 20 13:42:02 crc kubenswrapper[4690]: E0320 13:42:02.543459 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift podName:3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a nodeName:}" failed. No retries permitted until 2026-03-20 13:42:03.543435142 +0000 UTC m=+1169.833035075 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift") pod "swift-storage-0" (UID: "3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a") : configmap "swift-ring-files" not found Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.186200 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8","Type":"ContainerStarted","Data":"2f002c3bf86a924080e29b33f9004dce3dbb5ec17f628beb107e46c082a48fa1"} Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.189280 4690 generic.go:334] "Generic (PLEG): container finished" podID="19081a20-821d-49bd-abd4-7788cab48b2d" containerID="e91cd3393898810db0bb699a1399eeabaa19cb53d3cb6758114ea9b999e302c2" exitCode=0 Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.189352 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566902-9qjq2" event={"ID":"19081a20-821d-49bd-abd4-7788cab48b2d","Type":"ContainerDied","Data":"e91cd3393898810db0bb699a1399eeabaa19cb53d3cb6758114ea9b999e302c2"} Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.191707 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-xtj9s" event={"ID":"bf577129-6184-4843-9b9d-7a5fa9dd5088","Type":"ContainerDied","Data":"fe623659ec8ae4accc0a445ab5578ecaefd8df0291bbd63c87bdc3e3eecb26cf"} Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.191773 4690 scope.go:117] "RemoveContainer" containerID="c44ff7644150eb309cd029aeca83d66520292aa37e28f894f5c46dbd4972faa7" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.191979 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-xtj9s" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.197302 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" event={"ID":"a05f19fb-0bd7-418d-bf6d-1dec04ed9529","Type":"ContainerStarted","Data":"8769f13c1a88f9d95d1f94ce081df243e151b4a351b2e7701b1afb329da51bee"} Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.197525 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.199417 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d3e883d8-973f-4e69-a13f-175f1904a203","Type":"ContainerStarted","Data":"51297d0654a5eeb4092b6a63f2abd1a703efedb7d756791f44adfa9e0b6f7860"} Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.199951 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.215318 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=5.917634949 podStartE2EDuration="40.215268211s" podCreationTimestamp="2026-03-20 13:41:23 +0000 UTC" firstStartedPulling="2026-03-20 13:41:27.654946255 +0000 UTC m=+1133.944546198" lastFinishedPulling="2026-03-20 13:42:01.952579517 +0000 UTC m=+1168.242179460" observedRunningTime="2026-03-20 13:42:03.209949088 +0000 UTC m=+1169.499549051" watchObservedRunningTime="2026-03-20 13:42:03.215268211 +0000 UTC m=+1169.504868184" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.229672 4690 scope.go:117] "RemoveContainer" 
containerID="2a446c7c379728d0065b842473cd79b04ce32c5675052987db2e92b88636aeab" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.266603 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" podStartSLOduration=3.266546972 podStartE2EDuration="3.266546972s" podCreationTimestamp="2026-03-20 13:42:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:42:03.253275341 +0000 UTC m=+1169.542875284" watchObservedRunningTime="2026-03-20 13:42:03.266546972 +0000 UTC m=+1169.556146915" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.307504 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=8.947839986 podStartE2EDuration="43.307468786s" podCreationTimestamp="2026-03-20 13:41:20 +0000 UTC" firstStartedPulling="2026-03-20 13:41:27.668065902 +0000 UTC m=+1133.957665845" lastFinishedPulling="2026-03-20 13:42:02.027694702 +0000 UTC m=+1168.317294645" observedRunningTime="2026-03-20 13:42:03.274395837 +0000 UTC m=+1169.563995790" watchObservedRunningTime="2026-03-20 13:42:03.307468786 +0000 UTC m=+1169.597068729" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.322255 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-xtj9s"] Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.333435 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-xtj9s"] Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.352390 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-thbgv"] Mar 20 13:42:03 crc kubenswrapper[4690]: E0320 13:42:03.352690 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerName="init" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.352702 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerName="init" Mar 20 13:42:03 crc kubenswrapper[4690]: E0320 13:42:03.352727 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerName="dnsmasq-dns" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.352734 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerName="dnsmasq-dns" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.353788 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf577129-6184-4843-9b9d-7a5fa9dd5088" containerName="dnsmasq-dns" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.354316 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-thbgv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.358831 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr8n9\" (UniqueName: \"kubernetes.io/projected/f89adcb7-be07-48cd-8e10-0c8509a96029-kube-api-access-xr8n9\") pod \"glance-db-create-thbgv\" (UID: \"f89adcb7-be07-48cd-8e10-0c8509a96029\") " pod="openstack/glance-db-create-thbgv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.358959 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f89adcb7-be07-48cd-8e10-0c8509a96029-operator-scripts\") pod \"glance-db-create-thbgv\" (UID: \"f89adcb7-be07-48cd-8e10-0c8509a96029\") " pod="openstack/glance-db-create-thbgv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.366267 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-thbgv"] Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.418244 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-245d-account-create-update-gsx28"] Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.421036 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.424185 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.426616 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-245d-account-create-update-gsx28"] Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.461028 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg24f\" (UniqueName: \"kubernetes.io/projected/57aa5abf-4617-4b31-8a02-2721982d912c-kube-api-access-xg24f\") pod \"glance-245d-account-create-update-gsx28\" (UID: \"57aa5abf-4617-4b31-8a02-2721982d912c\") " pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.461132 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr8n9\" (UniqueName: \"kubernetes.io/projected/f89adcb7-be07-48cd-8e10-0c8509a96029-kube-api-access-xr8n9\") pod \"glance-db-create-thbgv\" (UID: \"f89adcb7-be07-48cd-8e10-0c8509a96029\") " pod="openstack/glance-db-create-thbgv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.461183 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f89adcb7-be07-48cd-8e10-0c8509a96029-operator-scripts\") pod \"glance-db-create-thbgv\" (UID: \"f89adcb7-be07-48cd-8e10-0c8509a96029\") " pod="openstack/glance-db-create-thbgv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.461231 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57aa5abf-4617-4b31-8a02-2721982d912c-operator-scripts\") pod \"glance-245d-account-create-update-gsx28\" (UID: \"57aa5abf-4617-4b31-8a02-2721982d912c\") " pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.463051 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/f89adcb7-be07-48cd-8e10-0c8509a96029-operator-scripts\") pod \"glance-db-create-thbgv\" (UID: \"f89adcb7-be07-48cd-8e10-0c8509a96029\") " pod="openstack/glance-db-create-thbgv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.482511 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr8n9\" (UniqueName: \"kubernetes.io/projected/f89adcb7-be07-48cd-8e10-0c8509a96029-kube-api-access-xr8n9\") pod \"glance-db-create-thbgv\" (UID: \"f89adcb7-be07-48cd-8e10-0c8509a96029\") " pod="openstack/glance-db-create-thbgv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.563648 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57aa5abf-4617-4b31-8a02-2721982d912c-operator-scripts\") pod \"glance-245d-account-create-update-gsx28\" (UID: \"57aa5abf-4617-4b31-8a02-2721982d912c\") " pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.563718 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg24f\" (UniqueName: \"kubernetes.io/projected/57aa5abf-4617-4b31-8a02-2721982d912c-kube-api-access-xg24f\") pod \"glance-245d-account-create-update-gsx28\" (UID: \"57aa5abf-4617-4b31-8a02-2721982d912c\") " pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.563771 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:03 crc kubenswrapper[4690]: E0320 13:42:03.563926 4690 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Mar 20 13:42:03 crc kubenswrapper[4690]: E0320 13:42:03.563940 4690 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Mar 20 13:42:03 crc kubenswrapper[4690]: E0320 13:42:03.563981 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift podName:3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a nodeName:}" failed. No retries permitted until 2026-03-20 13:42:05.563967807 +0000 UTC m=+1171.853567750 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift") pod "swift-storage-0" (UID: "3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a") : configmap "swift-ring-files" not found Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.565247 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57aa5abf-4617-4b31-8a02-2721982d912c-operator-scripts\") pod \"glance-245d-account-create-update-gsx28\" (UID: \"57aa5abf-4617-4b31-8a02-2721982d912c\") " pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.632945 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg24f\" (UniqueName: \"kubernetes.io/projected/57aa5abf-4617-4b31-8a02-2721982d912c-kube-api-access-xg24f\") pod \"glance-245d-account-create-update-gsx28\" (UID: \"57aa5abf-4617-4b31-8a02-2721982d912c\") " pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.671588 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-thbgv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.740209 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.842677 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.846508 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.853899 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-27tlv" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.863216 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-2dbfj" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.927836 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.969562 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjnkb\" (UniqueName: \"kubernetes.io/projected/c82e598b-5f84-4e68-aa8f-5682574fcae9-kube-api-access-cjnkb\") pod \"c82e598b-5f84-4e68-aa8f-5682574fcae9\" (UID: \"c82e598b-5f84-4e68-aa8f-5682574fcae9\") " Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.969616 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-operator-scripts\") pod \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\" (UID: \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\") " Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.969668 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsw4r\" (UniqueName: \"kubernetes.io/projected/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-kube-api-access-fsw4r\") pod \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\" (UID: \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\") " Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.970672 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a16960c-b84b-4b25-b51a-9f5dad54e473-operator-scripts\") pod \"0a16960c-b84b-4b25-b51a-9f5dad54e473\" (UID: \"0a16960c-b84b-4b25-b51a-9f5dad54e473\") " Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.970929 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-operator-scripts\") pod \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\" (UID: \"cc9e0393-9cc0-4120-8661-31fc5e0a77f6\") " Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.970958 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pn8r5\" (UniqueName: \"kubernetes.io/projected/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-kube-api-access-pn8r5\") pod \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\" (UID: \"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253\") " Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.971003 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c82e598b-5f84-4e68-aa8f-5682574fcae9-operator-scripts\") pod \"c82e598b-5f84-4e68-aa8f-5682574fcae9\" (UID: \"c82e598b-5f84-4e68-aa8f-5682574fcae9\") " Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.971020 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dk7tj\" (UniqueName: \"kubernetes.io/projected/0a16960c-b84b-4b25-b51a-9f5dad54e473-kube-api-access-dk7tj\") pod \"0a16960c-b84b-4b25-b51a-9f5dad54e473\" (UID: \"0a16960c-b84b-4b25-b51a-9f5dad54e473\") " Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.970799 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d1f9d0e8-10b6-4aa6-ae3a-890f9e521253" (UID: "d1f9d0e8-10b6-4aa6-ae3a-890f9e521253"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.971437 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0a16960c-b84b-4b25-b51a-9f5dad54e473-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0a16960c-b84b-4b25-b51a-9f5dad54e473" (UID: "0a16960c-b84b-4b25-b51a-9f5dad54e473"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.972405 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c82e598b-5f84-4e68-aa8f-5682574fcae9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c82e598b-5f84-4e68-aa8f-5682574fcae9" (UID: "c82e598b-5f84-4e68-aa8f-5682574fcae9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.974863 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "cc9e0393-9cc0-4120-8661-31fc5e0a77f6" (UID: "cc9e0393-9cc0-4120-8661-31fc5e0a77f6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.979017 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-kube-api-access-fsw4r" (OuterVolumeSpecName: "kube-api-access-fsw4r") pod "cc9e0393-9cc0-4120-8661-31fc5e0a77f6" (UID: "cc9e0393-9cc0-4120-8661-31fc5e0a77f6"). InnerVolumeSpecName "kube-api-access-fsw4r". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.979111 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a16960c-b84b-4b25-b51a-9f5dad54e473-kube-api-access-dk7tj" (OuterVolumeSpecName: "kube-api-access-dk7tj") pod "0a16960c-b84b-4b25-b51a-9f5dad54e473" (UID: "0a16960c-b84b-4b25-b51a-9f5dad54e473"). InnerVolumeSpecName "kube-api-access-dk7tj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.979179 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-kube-api-access-pn8r5" (OuterVolumeSpecName: "kube-api-access-pn8r5") pod "d1f9d0e8-10b6-4aa6-ae3a-890f9e521253" (UID: "d1f9d0e8-10b6-4aa6-ae3a-890f9e521253"). InnerVolumeSpecName "kube-api-access-pn8r5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:03 crc kubenswrapper[4690]: I0320 13:42:03.979201 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c82e598b-5f84-4e68-aa8f-5682574fcae9-kube-api-access-cjnkb" (OuterVolumeSpecName: "kube-api-access-cjnkb") pod "c82e598b-5f84-4e68-aa8f-5682574fcae9" (UID: "c82e598b-5f84-4e68-aa8f-5682574fcae9"). InnerVolumeSpecName "kube-api-access-cjnkb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.072266 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.072298 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pn8r5\" (UniqueName: \"kubernetes.io/projected/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-kube-api-access-pn8r5\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.072308 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c82e598b-5f84-4e68-aa8f-5682574fcae9-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.072317 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dk7tj\" (UniqueName: \"kubernetes.io/projected/0a16960c-b84b-4b25-b51a-9f5dad54e473-kube-api-access-dk7tj\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.072327 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjnkb\" (UniqueName: \"kubernetes.io/projected/c82e598b-5f84-4e68-aa8f-5682574fcae9-kube-api-access-cjnkb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.072336 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.072344 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsw4r\" (UniqueName: \"kubernetes.io/projected/cc9e0393-9cc0-4120-8661-31fc5e0a77f6-kube-api-access-fsw4r\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.072353 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0a16960c-b84b-4b25-b51a-9f5dad54e473-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.144714 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-thbgv"] Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.208554 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-899c-account-create-update-pd277" event={"ID":"cc9e0393-9cc0-4120-8661-31fc5e0a77f6","Type":"ContainerDied","Data":"ed93eb447c991e8898875a21353534c8061fef1dc4c15f604224411e8b5758f0"} Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.208588 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ed93eb447c991e8898875a21353534c8061fef1dc4c15f604224411e8b5758f0" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.208605 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-899c-account-create-update-pd277" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.210094 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-008d-account-create-update-mw8ch" event={"ID":"0a16960c-b84b-4b25-b51a-9f5dad54e473","Type":"ContainerDied","Data":"d3416a73a0f7ce21ef814a2a0d6bd8e19a5b8ed2064e5d2813c2ceefe57ca298"} Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.210128 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-008d-account-create-update-mw8ch" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.210134 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d3416a73a0f7ce21ef814a2a0d6bd8e19a5b8ed2064e5d2813c2ceefe57ca298" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.211223 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-27tlv" event={"ID":"d1f9d0e8-10b6-4aa6-ae3a-890f9e521253","Type":"ContainerDied","Data":"12bf28eec0ec83ebf2cb4f2acb279b58f7917310d598be18410bbcfd0e2bf17e"} Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.211244 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="12bf28eec0ec83ebf2cb4f2acb279b58f7917310d598be18410bbcfd0e2bf17e" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.211295 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-27tlv" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.217979 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-2dbfj" event={"ID":"c82e598b-5f84-4e68-aa8f-5682574fcae9","Type":"ContainerDied","Data":"617f85c8c50829cc7ccd7080a263a5a8c03f5164c085aba56d41688d1cb7a816"} Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.218010 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="617f85c8c50829cc7ccd7080a263a5a8c03f5164c085aba56d41688d1cb7a816" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.218059 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-2dbfj" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.224129 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-thbgv" event={"ID":"f89adcb7-be07-48cd-8e10-0c8509a96029","Type":"ContainerStarted","Data":"7fa1edba6b7f23453efcf25bef03dd770658630b52699638ec08bbe581e29c61"} Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.260525 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-245d-account-create-update-gsx28"] Mar 20 13:42:04 crc kubenswrapper[4690]: W0320 13:42:04.269293 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57aa5abf_4617_4b31_8a02_2721982d912c.slice/crio-c755408f2af46599af25e8d7686f43f415c38a92263b94db500c426e639206a4 WatchSource:0}: Error finding container c755408f2af46599af25e8d7686f43f415c38a92263b94db500c426e639206a4: Status 404 returned error can't find the container with id c755408f2af46599af25e8d7686f43f415c38a92263b94db500c426e639206a4 Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.444419 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf577129-6184-4843-9b9d-7a5fa9dd5088" path="/var/lib/kubelet/pods/bf577129-6184-4843-9b9d-7a5fa9dd5088/volumes" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.558086 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566902-9qjq2" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.684326 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n9776\" (UniqueName: \"kubernetes.io/projected/19081a20-821d-49bd-abd4-7788cab48b2d-kube-api-access-n9776\") pod \"19081a20-821d-49bd-abd4-7788cab48b2d\" (UID: \"19081a20-821d-49bd-abd4-7788cab48b2d\") " Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.692049 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/19081a20-821d-49bd-abd4-7788cab48b2d-kube-api-access-n9776" (OuterVolumeSpecName: "kube-api-access-n9776") pod "19081a20-821d-49bd-abd4-7788cab48b2d" (UID: "19081a20-821d-49bd-abd4-7788cab48b2d"). InnerVolumeSpecName "kube-api-access-n9776". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.785944 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n9776\" (UniqueName: \"kubernetes.io/projected/19081a20-821d-49bd-abd4-7788cab48b2d-kube-api-access-n9776\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.927266 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.971229 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-4n9wx"] Mar 20 13:42:04 crc kubenswrapper[4690]: E0320 13:42:04.971595 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cc9e0393-9cc0-4120-8661-31fc5e0a77f6" containerName="mariadb-account-create-update" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.971613 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="cc9e0393-9cc0-4120-8661-31fc5e0a77f6" containerName="mariadb-account-create-update" Mar 20 13:42:04 crc kubenswrapper[4690]: E0320 13:42:04.971634 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d1f9d0e8-10b6-4aa6-ae3a-890f9e521253" containerName="mariadb-database-create" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.971642 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d1f9d0e8-10b6-4aa6-ae3a-890f9e521253" containerName="mariadb-database-create" Mar 20 13:42:04 crc kubenswrapper[4690]: E0320 13:42:04.971664 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c82e598b-5f84-4e68-aa8f-5682574fcae9" containerName="mariadb-database-create" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.971673 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c82e598b-5f84-4e68-aa8f-5682574fcae9" containerName="mariadb-database-create" Mar 20 13:42:04 crc kubenswrapper[4690]: E0320 13:42:04.971698 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19081a20-821d-49bd-abd4-7788cab48b2d" containerName="oc" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.971706 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="19081a20-821d-49bd-abd4-7788cab48b2d" containerName="oc" Mar 20 13:42:04 crc kubenswrapper[4690]: E0320 13:42:04.971730 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a16960c-b84b-4b25-b51a-9f5dad54e473" containerName="mariadb-account-create-update" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.971738 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a16960c-b84b-4b25-b51a-9f5dad54e473" containerName="mariadb-account-create-update" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.974167 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="19081a20-821d-49bd-abd4-7788cab48b2d" containerName="oc" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.974210 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="c82e598b-5f84-4e68-aa8f-5682574fcae9" containerName="mariadb-database-create" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.974221 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a16960c-b84b-4b25-b51a-9f5dad54e473" containerName="mariadb-account-create-update" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.974231 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d1f9d0e8-10b6-4aa6-ae3a-890f9e521253" containerName="mariadb-database-create" Mar 20 
13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.974249 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="cc9e0393-9cc0-4120-8661-31fc5e0a77f6" containerName="mariadb-account-create-update" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.974897 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:04 crc kubenswrapper[4690]: I0320 13:42:04.976344 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.002295 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-4n9wx"] Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.118908 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqjp8\" (UniqueName: \"kubernetes.io/projected/81421dba-62e1-4ae9-b37b-fa6495308eb0-kube-api-access-xqjp8\") pod \"root-account-create-update-4n9wx\" (UID: \"81421dba-62e1-4ae9-b37b-fa6495308eb0\") " pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.119001 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81421dba-62e1-4ae9-b37b-fa6495308eb0-operator-scripts\") pod \"root-account-create-update-4n9wx\" (UID: \"81421dba-62e1-4ae9-b37b-fa6495308eb0\") " pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.220521 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81421dba-62e1-4ae9-b37b-fa6495308eb0-operator-scripts\") pod \"root-account-create-update-4n9wx\" (UID: \"81421dba-62e1-4ae9-b37b-fa6495308eb0\") " pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.220683 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqjp8\" (UniqueName: \"kubernetes.io/projected/81421dba-62e1-4ae9-b37b-fa6495308eb0-kube-api-access-xqjp8\") pod \"root-account-create-update-4n9wx\" (UID: \"81421dba-62e1-4ae9-b37b-fa6495308eb0\") " pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.222026 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81421dba-62e1-4ae9-b37b-fa6495308eb0-operator-scripts\") pod \"root-account-create-update-4n9wx\" (UID: \"81421dba-62e1-4ae9-b37b-fa6495308eb0\") " pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.234531 4690 generic.go:334] "Generic (PLEG): container finished" podID="57aa5abf-4617-4b31-8a02-2721982d912c" containerID="cf9d7b43b434b8c79a344c14e0d52da636df4451b12a8483fc4cbf45930803fb" exitCode=0 Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.234616 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-245d-account-create-update-gsx28" event={"ID":"57aa5abf-4617-4b31-8a02-2721982d912c","Type":"ContainerDied","Data":"cf9d7b43b434b8c79a344c14e0d52da636df4451b12a8483fc4cbf45930803fb"} Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.234663 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-245d-account-create-update-gsx28" 
event={"ID":"57aa5abf-4617-4b31-8a02-2721982d912c","Type":"ContainerStarted","Data":"c755408f2af46599af25e8d7686f43f415c38a92263b94db500c426e639206a4"} Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.235947 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566902-9qjq2" event={"ID":"19081a20-821d-49bd-abd4-7788cab48b2d","Type":"ContainerDied","Data":"05356dc6a7a88340e79c326625b24a7c6a637a9ce714490c64f29bc26b40d284"} Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.235968 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05356dc6a7a88340e79c326625b24a7c6a637a9ce714490c64f29bc26b40d284" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.235997 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566902-9qjq2" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.237536 4690 generic.go:334] "Generic (PLEG): container finished" podID="f89adcb7-be07-48cd-8e10-0c8509a96029" containerID="e9f6051f9cfa372895e19ab05fd56c78be80d581d9ade62c1924639c83211f6c" exitCode=0 Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.237566 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqjp8\" (UniqueName: \"kubernetes.io/projected/81421dba-62e1-4ae9-b37b-fa6495308eb0-kube-api-access-xqjp8\") pod \"root-account-create-update-4n9wx\" (UID: \"81421dba-62e1-4ae9-b37b-fa6495308eb0\") " pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.238085 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-thbgv" event={"ID":"f89adcb7-be07-48cd-8e10-0c8509a96029","Type":"ContainerDied","Data":"e9f6051f9cfa372895e19ab05fd56c78be80d581d9ade62c1924639c83211f6c"} Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.308933 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.623314 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566896-kj56m"] Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.629874 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566896-kj56m"] Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.632405 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:05 crc kubenswrapper[4690]: E0320 13:42:05.632577 4690 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Mar 20 13:42:05 crc kubenswrapper[4690]: E0320 13:42:05.632590 4690 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Mar 20 13:42:05 crc kubenswrapper[4690]: E0320 13:42:05.632631 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift podName:3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a nodeName:}" failed. No retries permitted until 2026-03-20 13:42:09.632617908 +0000 UTC m=+1175.922217851 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift") pod "swift-storage-0" (UID: "3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a") : configmap "swift-ring-files" not found Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.699721 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-57rps"] Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.700787 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.702455 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.702681 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.703113 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.707572 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-57rps"] Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.835680 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-dispersionconf\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.839481 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-etc-swift\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.840225 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-ring-data-devices\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.840640 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-scripts\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.840946 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-combined-ca-bundle\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.841126 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-swiftconf\") pod 
\"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.841341 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7hbr\" (UniqueName: \"kubernetes.io/projected/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-kube-api-access-l7hbr\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.844663 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-4n9wx"] Mar 20 13:42:05 crc kubenswrapper[4690]: W0320 13:42:05.847406 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod81421dba_62e1_4ae9_b37b_fa6495308eb0.slice/crio-f9635df8b21531b8ae6a20cbc15bb0c94a8d4d706617f090d88130abcbe3a92c WatchSource:0}: Error finding container f9635df8b21531b8ae6a20cbc15bb0c94a8d4d706617f090d88130abcbe3a92c: Status 404 returned error can't find the container with id f9635df8b21531b8ae6a20cbc15bb0c94a8d4d706617f090d88130abcbe3a92c Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.943312 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-combined-ca-bundle\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.943388 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-swiftconf\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.943499 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7hbr\" (UniqueName: \"kubernetes.io/projected/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-kube-api-access-l7hbr\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.943776 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-dispersionconf\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.943896 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-etc-swift\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.943997 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-ring-data-devices\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " 
pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.944119 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-scripts\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.944411 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-etc-swift\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.945291 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-ring-data-devices\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.946034 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-scripts\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.948798 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-dispersionconf\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.954612 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-swiftconf\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.954955 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-combined-ca-bundle\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:05 crc kubenswrapper[4690]: I0320 13:42:05.961751 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7hbr\" (UniqueName: \"kubernetes.io/projected/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-kube-api-access-l7hbr\") pod \"swift-ring-rebalance-57rps\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.021218 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.246102 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4n9wx" event={"ID":"81421dba-62e1-4ae9-b37b-fa6495308eb0","Type":"ContainerStarted","Data":"477bd78e723b494d30bce3e15f338f131c639fe6b1fc266214f4c44a6c3857d4"} Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.246250 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4n9wx" event={"ID":"81421dba-62e1-4ae9-b37b-fa6495308eb0","Type":"ContainerStarted","Data":"f9635df8b21531b8ae6a20cbc15bb0c94a8d4d706617f090d88130abcbe3a92c"} Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.266442 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/root-account-create-update-4n9wx" podStartSLOduration=2.266426096 podStartE2EDuration="2.266426096s" podCreationTimestamp="2026-03-20 13:42:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:42:06.259765385 +0000 UTC m=+1172.549365328" watchObservedRunningTime="2026-03-20 13:42:06.266426096 +0000 UTC m=+1172.556026039" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.424045 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19b3591a-cb0d-4249-968a-06e6c9891eb1" path="/var/lib/kubelet/pods/19b3591a-cb0d-4249-968a-06e6c9891eb1/volumes" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.464075 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-57rps"] Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.571388 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.613319 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-thbgv" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.667305 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57aa5abf-4617-4b31-8a02-2721982d912c-operator-scripts\") pod \"57aa5abf-4617-4b31-8a02-2721982d912c\" (UID: \"57aa5abf-4617-4b31-8a02-2721982d912c\") " Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.667447 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg24f\" (UniqueName: \"kubernetes.io/projected/57aa5abf-4617-4b31-8a02-2721982d912c-kube-api-access-xg24f\") pod \"57aa5abf-4617-4b31-8a02-2721982d912c\" (UID: \"57aa5abf-4617-4b31-8a02-2721982d912c\") " Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.668188 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/57aa5abf-4617-4b31-8a02-2721982d912c-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "57aa5abf-4617-4b31-8a02-2721982d912c" (UID: "57aa5abf-4617-4b31-8a02-2721982d912c"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.674327 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57aa5abf-4617-4b31-8a02-2721982d912c-kube-api-access-xg24f" (OuterVolumeSpecName: "kube-api-access-xg24f") pod "57aa5abf-4617-4b31-8a02-2721982d912c" (UID: "57aa5abf-4617-4b31-8a02-2721982d912c"). InnerVolumeSpecName "kube-api-access-xg24f". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.768661 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xr8n9\" (UniqueName: \"kubernetes.io/projected/f89adcb7-be07-48cd-8e10-0c8509a96029-kube-api-access-xr8n9\") pod \"f89adcb7-be07-48cd-8e10-0c8509a96029\" (UID: \"f89adcb7-be07-48cd-8e10-0c8509a96029\") " Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.768911 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f89adcb7-be07-48cd-8e10-0c8509a96029-operator-scripts\") pod \"f89adcb7-be07-48cd-8e10-0c8509a96029\" (UID: \"f89adcb7-be07-48cd-8e10-0c8509a96029\") " Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.769446 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg24f\" (UniqueName: \"kubernetes.io/projected/57aa5abf-4617-4b31-8a02-2721982d912c-kube-api-access-xg24f\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.769479 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/57aa5abf-4617-4b31-8a02-2721982d912c-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.769439 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f89adcb7-be07-48cd-8e10-0c8509a96029-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f89adcb7-be07-48cd-8e10-0c8509a96029" (UID: "f89adcb7-be07-48cd-8e10-0c8509a96029"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.771416 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f89adcb7-be07-48cd-8e10-0c8509a96029-kube-api-access-xr8n9" (OuterVolumeSpecName: "kube-api-access-xr8n9") pod "f89adcb7-be07-48cd-8e10-0c8509a96029" (UID: "f89adcb7-be07-48cd-8e10-0c8509a96029"). InnerVolumeSpecName "kube-api-access-xr8n9". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.871155 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f89adcb7-be07-48cd-8e10-0c8509a96029-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.871201 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xr8n9\" (UniqueName: \"kubernetes.io/projected/f89adcb7-be07-48cd-8e10-0c8509a96029-kube-api-access-xr8n9\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:06 crc kubenswrapper[4690]: I0320 13:42:06.997710 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.257006 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-thbgv" Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.257039 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-thbgv" event={"ID":"f89adcb7-be07-48cd-8e10-0c8509a96029","Type":"ContainerDied","Data":"7fa1edba6b7f23453efcf25bef03dd770658630b52699638ec08bbe581e29c61"} Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.257114 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fa1edba6b7f23453efcf25bef03dd770658630b52699638ec08bbe581e29c61" Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.258296 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-57rps" event={"ID":"0b26393a-1d00-4b21-a3b2-74518b7f0b3d","Type":"ContainerStarted","Data":"ad918447d99a49dece6049c908b136ebea3826c88336e9c09f28d2aed178bf1b"} Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.260287 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-245d-account-create-update-gsx28" event={"ID":"57aa5abf-4617-4b31-8a02-2721982d912c","Type":"ContainerDied","Data":"c755408f2af46599af25e8d7686f43f415c38a92263b94db500c426e639206a4"} Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.260343 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-245d-account-create-update-gsx28" Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.260354 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c755408f2af46599af25e8d7686f43f415c38a92263b94db500c426e639206a4" Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.270560 4690 generic.go:334] "Generic (PLEG): container finished" podID="81421dba-62e1-4ae9-b37b-fa6495308eb0" containerID="477bd78e723b494d30bce3e15f338f131c639fe6b1fc266214f4c44a6c3857d4" exitCode=0 Mar 20 13:42:07 crc kubenswrapper[4690]: I0320 13:42:07.270613 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4n9wx" event={"ID":"81421dba-62e1-4ae9-b37b-fa6495308eb0","Type":"ContainerDied","Data":"477bd78e723b494d30bce3e15f338f131c639fe6b1fc266214f4c44a6c3857d4"} Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.571412 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-rf8w7"] Mar 20 13:42:08 crc kubenswrapper[4690]: E0320 13:42:08.572248 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57aa5abf-4617-4b31-8a02-2721982d912c" containerName="mariadb-account-create-update" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.572267 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="57aa5abf-4617-4b31-8a02-2721982d912c" containerName="mariadb-account-create-update" Mar 20 13:42:08 crc kubenswrapper[4690]: E0320 13:42:08.572283 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f89adcb7-be07-48cd-8e10-0c8509a96029" containerName="mariadb-database-create" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.572291 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f89adcb7-be07-48cd-8e10-0c8509a96029" containerName="mariadb-database-create" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.572503 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f89adcb7-be07-48cd-8e10-0c8509a96029" containerName="mariadb-database-create" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.572524 4690 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="57aa5abf-4617-4b31-8a02-2721982d912c" containerName="mariadb-account-create-update" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.573314 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.575831 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.576238 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-bch99" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.581872 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rf8w7"] Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.705222 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcsjr\" (UniqueName: \"kubernetes.io/projected/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-kube-api-access-fcsjr\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.705482 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-config-data\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.705807 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-db-sync-config-data\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.705915 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-combined-ca-bundle\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.807759 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-db-sync-config-data\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.807806 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-combined-ca-bundle\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.807869 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcsjr\" (UniqueName: \"kubernetes.io/projected/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-kube-api-access-fcsjr\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.807918 
4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-config-data\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.813494 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-db-sync-config-data\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.814704 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-config-data\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.814919 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-combined-ca-bundle\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.826206 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcsjr\" (UniqueName: \"kubernetes.io/projected/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-kube-api-access-fcsjr\") pod \"glance-db-sync-rf8w7\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:08 crc kubenswrapper[4690]: I0320 13:42:08.891932 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.553176 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.618914 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81421dba-62e1-4ae9-b37b-fa6495308eb0-operator-scripts\") pod \"81421dba-62e1-4ae9-b37b-fa6495308eb0\" (UID: \"81421dba-62e1-4ae9-b37b-fa6495308eb0\") " Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.618988 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqjp8\" (UniqueName: \"kubernetes.io/projected/81421dba-62e1-4ae9-b37b-fa6495308eb0-kube-api-access-xqjp8\") pod \"81421dba-62e1-4ae9-b37b-fa6495308eb0\" (UID: \"81421dba-62e1-4ae9-b37b-fa6495308eb0\") " Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.620768 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/81421dba-62e1-4ae9-b37b-fa6495308eb0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "81421dba-62e1-4ae9-b37b-fa6495308eb0" (UID: "81421dba-62e1-4ae9-b37b-fa6495308eb0"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.624699 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81421dba-62e1-4ae9-b37b-fa6495308eb0-kube-api-access-xqjp8" (OuterVolumeSpecName: "kube-api-access-xqjp8") pod "81421dba-62e1-4ae9-b37b-fa6495308eb0" (UID: "81421dba-62e1-4ae9-b37b-fa6495308eb0"). InnerVolumeSpecName "kube-api-access-xqjp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.720967 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:09 crc kubenswrapper[4690]: E0320 13:42:09.721340 4690 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Mar 20 13:42:09 crc kubenswrapper[4690]: E0320 13:42:09.721386 4690 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.721432 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/81421dba-62e1-4ae9-b37b-fa6495308eb0-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.721448 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqjp8\" (UniqueName: \"kubernetes.io/projected/81421dba-62e1-4ae9-b37b-fa6495308eb0-kube-api-access-xqjp8\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:09 crc kubenswrapper[4690]: E0320 13:42:09.721493 4690 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift podName:3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a nodeName:}" failed. No retries permitted until 2026-03-20 13:42:17.721466141 +0000 UTC m=+1184.011066114 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift") pod "swift-storage-0" (UID: "3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a") : configmap "swift-ring-files" not found Mar 20 13:42:09 crc kubenswrapper[4690]: I0320 13:42:09.989823 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.088691 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-rf8w7"] Mar 20 13:42:10 crc kubenswrapper[4690]: W0320 13:42:10.092642 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5d5fc4d_8398_4ccd_bf8e_69bbd639e23e.slice/crio-8d7019ff2e188180d617874aaf959fcb1479049503bf65b3a726e3c6d437aa1e WatchSource:0}: Error finding container 8d7019ff2e188180d617874aaf959fcb1479049503bf65b3a726e3c6d437aa1e: Status 404 returned error can't find the container with id 8d7019ff2e188180d617874aaf959fcb1479049503bf65b3a726e3c6d437aa1e Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.241673 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"] Mar 20 13:42:10 crc kubenswrapper[4690]: E0320 13:42:10.242401 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81421dba-62e1-4ae9-b37b-fa6495308eb0" containerName="mariadb-account-create-update" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.242432 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="81421dba-62e1-4ae9-b37b-fa6495308eb0" containerName="mariadb-account-create-update" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.242657 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="81421dba-62e1-4ae9-b37b-fa6495308eb0" containerName="mariadb-account-create-update" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.243813 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.247249 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.250513 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.250551 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-65jm9" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.250931 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.253705 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.295607 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-57rps" event={"ID":"0b26393a-1d00-4b21-a3b2-74518b7f0b3d","Type":"ContainerStarted","Data":"5679c33ff85a593bb06202a815f9315a89d3836ad9e82c78324dc5fb15b4f4f1"} Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.296674 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rf8w7" event={"ID":"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e","Type":"ContainerStarted","Data":"8d7019ff2e188180d617874aaf959fcb1479049503bf65b3a726e3c6d437aa1e"} Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.298265 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-4n9wx" event={"ID":"81421dba-62e1-4ae9-b37b-fa6495308eb0","Type":"ContainerDied","Data":"f9635df8b21531b8ae6a20cbc15bb0c94a8d4d706617f090d88130abcbe3a92c"} Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.298301 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9635df8b21531b8ae6a20cbc15bb0c94a8d4d706617f090d88130abcbe3a92c" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.298363 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-4n9wx" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.320367 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-57rps" podStartSLOduration=2.270828869 podStartE2EDuration="5.320344907s" podCreationTimestamp="2026-03-20 13:42:05 +0000 UTC" firstStartedPulling="2026-03-20 13:42:06.495447008 +0000 UTC m=+1172.785046951" lastFinishedPulling="2026-03-20 13:42:09.544963046 +0000 UTC m=+1175.834562989" observedRunningTime="2026-03-20 13:42:10.316054494 +0000 UTC m=+1176.605654437" watchObservedRunningTime="2026-03-20 13:42:10.320344907 +0000 UTC m=+1176.609944850" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.336192 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.336277 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8wkdn\" (UniqueName: \"kubernetes.io/projected/edbd8493-2301-46b3-b4ba-b60511e31302-kube-api-access-8wkdn\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.336308 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.336382 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/edbd8493-2301-46b3-b4ba-b60511e31302-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.336502 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.336617 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/edbd8493-2301-46b3-b4ba-b60511e31302-scripts\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.336675 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edbd8493-2301-46b3-b4ba-b60511e31302-config\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.438318 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.438409 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/edbd8493-2301-46b3-b4ba-b60511e31302-scripts\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.438435 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edbd8493-2301-46b3-b4ba-b60511e31302-config\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.438572 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.438615 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8wkdn\" (UniqueName: \"kubernetes.io/projected/edbd8493-2301-46b3-b4ba-b60511e31302-kube-api-access-8wkdn\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.438639 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.438676 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/edbd8493-2301-46b3-b4ba-b60511e31302-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.439425 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/edbd8493-2301-46b3-b4ba-b60511e31302-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.439436 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/edbd8493-2301-46b3-b4ba-b60511e31302-config\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.439565 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/edbd8493-2301-46b3-b4ba-b60511e31302-scripts\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.443360 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.443612 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.454593 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/edbd8493-2301-46b3-b4ba-b60511e31302-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.455827 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8wkdn\" (UniqueName: \"kubernetes.io/projected/edbd8493-2301-46b3-b4ba-b60511e31302-kube-api-access-8wkdn\") pod \"ovn-northd-0\" (UID: \"edbd8493-2301-46b3-b4ba-b60511e31302\") " pod="openstack/ovn-northd-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.560722 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Mar 20 13:42:10 crc kubenswrapper[4690]: I0320 13:42:10.562137 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0" Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.061672 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.155947 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gp2rt"] Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.156200 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" podUID="c515e2c8-7d1c-4010-989a-1c61f02deea8" containerName="dnsmasq-dns" containerID="cri-o://65f76795183883680dc1dd91f84a05caf0196b25313444a056aaaf595ce9535b" gracePeriod=10 Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.172329 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"] Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.308388 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"edbd8493-2301-46b3-b4ba-b60511e31302","Type":"ContainerStarted","Data":"acef08cac5dccc3cbac5b41085784401148cbc427f924b6cfbeb245b744762ba"} Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.313449 4690 generic.go:334] "Generic (PLEG): container finished" podID="c515e2c8-7d1c-4010-989a-1c61f02deea8" containerID="65f76795183883680dc1dd91f84a05caf0196b25313444a056aaaf595ce9535b" exitCode=0 Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.314013 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" event={"ID":"c515e2c8-7d1c-4010-989a-1c61f02deea8","Type":"ContainerDied","Data":"65f76795183883680dc1dd91f84a05caf0196b25313444a056aaaf595ce9535b"} Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.588193 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-4n9wx"] Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 
13:42:11.598673 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-4n9wx"] Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.637118 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.765142 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qldwj\" (UniqueName: \"kubernetes.io/projected/c515e2c8-7d1c-4010-989a-1c61f02deea8-kube-api-access-qldwj\") pod \"c515e2c8-7d1c-4010-989a-1c61f02deea8\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.765235 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-dns-svc\") pod \"c515e2c8-7d1c-4010-989a-1c61f02deea8\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.765301 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-config\") pod \"c515e2c8-7d1c-4010-989a-1c61f02deea8\" (UID: \"c515e2c8-7d1c-4010-989a-1c61f02deea8\") " Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.771228 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c515e2c8-7d1c-4010-989a-1c61f02deea8-kube-api-access-qldwj" (OuterVolumeSpecName: "kube-api-access-qldwj") pod "c515e2c8-7d1c-4010-989a-1c61f02deea8" (UID: "c515e2c8-7d1c-4010-989a-1c61f02deea8"). InnerVolumeSpecName "kube-api-access-qldwj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.806419 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-config" (OuterVolumeSpecName: "config") pod "c515e2c8-7d1c-4010-989a-1c61f02deea8" (UID: "c515e2c8-7d1c-4010-989a-1c61f02deea8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.812643 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c515e2c8-7d1c-4010-989a-1c61f02deea8" (UID: "c515e2c8-7d1c-4010-989a-1c61f02deea8"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.867535 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.867572 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qldwj\" (UniqueName: \"kubernetes.io/projected/c515e2c8-7d1c-4010-989a-1c61f02deea8-kube-api-access-qldwj\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:11 crc kubenswrapper[4690]: I0320 13:42:11.867585 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c515e2c8-7d1c-4010-989a-1c61f02deea8-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:12 crc kubenswrapper[4690]: I0320 13:42:12.328270 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" event={"ID":"c515e2c8-7d1c-4010-989a-1c61f02deea8","Type":"ContainerDied","Data":"cde3e5dbc338c6a04a079af56e3208622c8715ce5eced5b655ab1bcc8a1ec650"} Mar 20 13:42:12 crc kubenswrapper[4690]: I0320 13:42:12.328339 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-gp2rt" Mar 20 13:42:12 crc kubenswrapper[4690]: I0320 13:42:12.328634 4690 scope.go:117] "RemoveContainer" containerID="65f76795183883680dc1dd91f84a05caf0196b25313444a056aaaf595ce9535b" Mar 20 13:42:12 crc kubenswrapper[4690]: I0320 13:42:12.359303 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gp2rt"] Mar 20 13:42:12 crc kubenswrapper[4690]: I0320 13:42:12.361414 4690 scope.go:117] "RemoveContainer" containerID="9beb1a329e642e500dfdcc999ed1e1af26106709e23cfee04faa04630361170d" Mar 20 13:42:12 crc kubenswrapper[4690]: I0320 13:42:12.365796 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-gp2rt"] Mar 20 13:42:12 crc kubenswrapper[4690]: I0320 13:42:12.428825 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81421dba-62e1-4ae9-b37b-fa6495308eb0" path="/var/lib/kubelet/pods/81421dba-62e1-4ae9-b37b-fa6495308eb0/volumes" Mar 20 13:42:12 crc kubenswrapper[4690]: I0320 13:42:12.429533 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c515e2c8-7d1c-4010-989a-1c61f02deea8" path="/var/lib/kubelet/pods/c515e2c8-7d1c-4010-989a-1c61f02deea8/volumes" Mar 20 13:42:13 crc kubenswrapper[4690]: I0320 13:42:13.353622 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"edbd8493-2301-46b3-b4ba-b60511e31302","Type":"ContainerStarted","Data":"1b5aa3944a0850210aa15544526dd3157f4684d6533da9e649868921249bbc90"} Mar 20 13:42:13 crc kubenswrapper[4690]: I0320 13:42:13.353918 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"edbd8493-2301-46b3-b4ba-b60511e31302","Type":"ContainerStarted","Data":"9b7bd41b5ffa60474997377ddfbd76f2c205551932201f448bc281b5d06e9730"} Mar 20 13:42:13 crc kubenswrapper[4690]: I0320 13:42:13.354135 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Mar 20 13:42:13 crc kubenswrapper[4690]: I0320 13:42:13.372505 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.117542408 podStartE2EDuration="3.372485951s" 
podCreationTimestamp="2026-03-20 13:42:10 +0000 UTC" firstStartedPulling="2026-03-20 13:42:11.18264768 +0000 UTC m=+1177.472247623" lastFinishedPulling="2026-03-20 13:42:12.437591233 +0000 UTC m=+1178.727191166" observedRunningTime="2026-03-20 13:42:13.36933706 +0000 UTC m=+1179.658937003" watchObservedRunningTime="2026-03-20 13:42:13.372485951 +0000 UTC m=+1179.662085904" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.012643 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-rhzkk"] Mar 20 13:42:15 crc kubenswrapper[4690]: E0320 13:42:15.013348 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c515e2c8-7d1c-4010-989a-1c61f02deea8" containerName="init" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.013363 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c515e2c8-7d1c-4010-989a-1c61f02deea8" containerName="init" Mar 20 13:42:15 crc kubenswrapper[4690]: E0320 13:42:15.013386 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c515e2c8-7d1c-4010-989a-1c61f02deea8" containerName="dnsmasq-dns" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.013395 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c515e2c8-7d1c-4010-989a-1c61f02deea8" containerName="dnsmasq-dns" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.013594 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="c515e2c8-7d1c-4010-989a-1c61f02deea8" containerName="dnsmasq-dns" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.014227 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.018177 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-mariadb-root-db-secret" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.022972 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rhzkk"] Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.122214 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cls26\" (UniqueName: \"kubernetes.io/projected/d185ca61-daf3-4a53-8604-c79c1546d8e5-kube-api-access-cls26\") pod \"root-account-create-update-rhzkk\" (UID: \"d185ca61-daf3-4a53-8604-c79c1546d8e5\") " pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.122336 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d185ca61-daf3-4a53-8604-c79c1546d8e5-operator-scripts\") pod \"root-account-create-update-rhzkk\" (UID: \"d185ca61-daf3-4a53-8604-c79c1546d8e5\") " pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.223956 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cls26\" (UniqueName: \"kubernetes.io/projected/d185ca61-daf3-4a53-8604-c79c1546d8e5-kube-api-access-cls26\") pod \"root-account-create-update-rhzkk\" (UID: \"d185ca61-daf3-4a53-8604-c79c1546d8e5\") " pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.224066 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/d185ca61-daf3-4a53-8604-c79c1546d8e5-operator-scripts\") pod \"root-account-create-update-rhzkk\" (UID: \"d185ca61-daf3-4a53-8604-c79c1546d8e5\") " pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.225141 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d185ca61-daf3-4a53-8604-c79c1546d8e5-operator-scripts\") pod \"root-account-create-update-rhzkk\" (UID: \"d185ca61-daf3-4a53-8604-c79c1546d8e5\") " pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.248321 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cls26\" (UniqueName: \"kubernetes.io/projected/d185ca61-daf3-4a53-8604-c79c1546d8e5-kube-api-access-cls26\") pod \"root-account-create-update-rhzkk\" (UID: \"d185ca61-daf3-4a53-8604-c79c1546d8e5\") " pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:15 crc kubenswrapper[4690]: I0320 13:42:15.367909 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:16 crc kubenswrapper[4690]: I0320 13:42:16.386791 4690 generic.go:334] "Generic (PLEG): container finished" podID="0b26393a-1d00-4b21-a3b2-74518b7f0b3d" containerID="5679c33ff85a593bb06202a815f9315a89d3836ad9e82c78324dc5fb15b4f4f1" exitCode=0 Mar 20 13:42:16 crc kubenswrapper[4690]: I0320 13:42:16.386977 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-57rps" event={"ID":"0b26393a-1d00-4b21-a3b2-74518b7f0b3d","Type":"ContainerDied","Data":"5679c33ff85a593bb06202a815f9315a89d3836ad9e82c78324dc5fb15b4f4f1"} Mar 20 13:42:17 crc kubenswrapper[4690]: I0320 13:42:17.811794 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:17 crc kubenswrapper[4690]: I0320 13:42:17.820215 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a-etc-swift\") pod \"swift-storage-0\" (UID: \"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a\") " pod="openstack/swift-storage-0" Mar 20 13:42:18 crc kubenswrapper[4690]: I0320 13:42:18.076197 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Mar 20 13:42:18 crc kubenswrapper[4690]: I0320 13:42:18.788442 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-mxmrl" podUID="9e0061bd-d72c-4aeb-86f0-154e0cccfe15" containerName="ovn-controller" probeResult="failure" output=< Mar 20 13:42:18 crc kubenswrapper[4690]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Mar 20 13:42:18 crc kubenswrapper[4690]: > Mar 20 13:42:18 crc kubenswrapper[4690]: I0320 13:42:18.791325 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:42:18 crc kubenswrapper[4690]: I0320 13:42:18.792798 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-s6fhs" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.008527 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-mxmrl-config-rq2k9"] Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.010309 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.015722 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mxmrl-config-rq2k9"] Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.021820 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.137396 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-log-ovn\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.137575 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-additional-scripts\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.137630 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run-ovn\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.137670 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.137707 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-scripts\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: 
\"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.137761 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9d69\" (UniqueName: \"kubernetes.io/projected/c45f41c1-0e51-4534-acde-9c5ab2597126-kube-api-access-x9d69\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239398 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-log-ovn\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239489 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-additional-scripts\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239513 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run-ovn\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239535 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-scripts\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239554 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239583 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9d69\" (UniqueName: \"kubernetes.io/projected/c45f41c1-0e51-4534-acde-9c5ab2597126-kube-api-access-x9d69\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239792 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-log-ovn\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239823 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run-ovn\") pod 
\"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.239839 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.240768 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-additional-scripts\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.245098 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-scripts\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.259992 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9d69\" (UniqueName: \"kubernetes.io/projected/c45f41c1-0e51-4534-acde-9c5ab2597126-kube-api-access-x9d69\") pod \"ovn-controller-mxmrl-config-rq2k9\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:19 crc kubenswrapper[4690]: I0320 13:42:19.345926 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.429627 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-57rps" event={"ID":"0b26393a-1d00-4b21-a3b2-74518b7f0b3d","Type":"ContainerDied","Data":"ad918447d99a49dece6049c908b136ebea3826c88336e9c09f28d2aed178bf1b"} Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.429986 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ad918447d99a49dece6049c908b136ebea3826c88336e9c09f28d2aed178bf1b" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.547132 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.679375 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-dispersionconf\") pod \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.679809 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-scripts\") pod \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.679836 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7hbr\" (UniqueName: \"kubernetes.io/projected/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-kube-api-access-l7hbr\") pod \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.679921 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-etc-swift\") pod \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.680057 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-swiftconf\") pod \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.680094 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-combined-ca-bundle\") pod \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.680137 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-ring-data-devices\") pod \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\" (UID: \"0b26393a-1d00-4b21-a3b2-74518b7f0b3d\") " Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.681345 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "0b26393a-1d00-4b21-a3b2-74518b7f0b3d" (UID: "0b26393a-1d00-4b21-a3b2-74518b7f0b3d"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.681552 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "0b26393a-1d00-4b21-a3b2-74518b7f0b3d" (UID: "0b26393a-1d00-4b21-a3b2-74518b7f0b3d"). InnerVolumeSpecName "etc-swift". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.684136 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-kube-api-access-l7hbr" (OuterVolumeSpecName: "kube-api-access-l7hbr") pod "0b26393a-1d00-4b21-a3b2-74518b7f0b3d" (UID: "0b26393a-1d00-4b21-a3b2-74518b7f0b3d"). InnerVolumeSpecName "kube-api-access-l7hbr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.688452 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "0b26393a-1d00-4b21-a3b2-74518b7f0b3d" (UID: "0b26393a-1d00-4b21-a3b2-74518b7f0b3d"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.699200 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-scripts" (OuterVolumeSpecName: "scripts") pod "0b26393a-1d00-4b21-a3b2-74518b7f0b3d" (UID: "0b26393a-1d00-4b21-a3b2-74518b7f0b3d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.702808 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "0b26393a-1d00-4b21-a3b2-74518b7f0b3d" (UID: "0b26393a-1d00-4b21-a3b2-74518b7f0b3d"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.707240 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0b26393a-1d00-4b21-a3b2-74518b7f0b3d" (UID: "0b26393a-1d00-4b21-a3b2-74518b7f0b3d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.781931 4690 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-ring-data-devices\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.781957 4690 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-dispersionconf\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.781966 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.781975 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7hbr\" (UniqueName: \"kubernetes.io/projected/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-kube-api-access-l7hbr\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.781984 4690 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-etc-swift\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.781992 4690 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-swiftconf\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.782000 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0b26393a-1d00-4b21-a3b2-74518b7f0b3d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.896265 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-rhzkk"] Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.903506 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-mxmrl-config-rq2k9"] Mar 20 13:42:21 crc kubenswrapper[4690]: W0320 13:42:21.918193 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd185ca61_daf3_4a53_8604_c79c1546d8e5.slice/crio-698941ca5450256a6af618a507ef59ffa996ceead703471238334a58485aa5de WatchSource:0}: Error finding container 698941ca5450256a6af618a507ef59ffa996ceead703471238334a58485aa5de: Status 404 returned error can't find the container with id 698941ca5450256a6af618a507ef59ffa996ceead703471238334a58485aa5de Mar 20 13:42:21 crc kubenswrapper[4690]: I0320 13:42:21.988992 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Mar 20 13:42:21 crc kubenswrapper[4690]: W0320 13:42:21.991071 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3f8a5e46_bb4f_498c_ac43_d9cfb5f1945a.slice/crio-1a8a5319eec8ba9abd40a78e371bb1c5d01f3c08dc2f2f7e45bbe5f90f2813e7 WatchSource:0}: Error finding container 1a8a5319eec8ba9abd40a78e371bb1c5d01f3c08dc2f2f7e45bbe5f90f2813e7: Status 404 returned error can't find the container with id 1a8a5319eec8ba9abd40a78e371bb1c5d01f3c08dc2f2f7e45bbe5f90f2813e7 Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.440287 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"1a8a5319eec8ba9abd40a78e371bb1c5d01f3c08dc2f2f7e45bbe5f90f2813e7"} Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.446661 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mxmrl-config-rq2k9" event={"ID":"c45f41c1-0e51-4534-acde-9c5ab2597126","Type":"ContainerStarted","Data":"46044e43cada2e61c44cca7fbbc48779ba086cc0491324f61348f1ca1ea54b4e"} Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.446697 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mxmrl-config-rq2k9" event={"ID":"c45f41c1-0e51-4534-acde-9c5ab2597126","Type":"ContainerStarted","Data":"a5042cf3393359ad6e09ad8a28e9d78691b628398a56e4487b29f3d172cac84f"} Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.452468 4690 generic.go:334] "Generic (PLEG): container finished" podID="d185ca61-daf3-4a53-8604-c79c1546d8e5" containerID="3b636293b7cd0a98237276b9603db1f236f6378697c96d31f033c995c5aec2b6" exitCode=0 Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.452555 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rhzkk" event={"ID":"d185ca61-daf3-4a53-8604-c79c1546d8e5","Type":"ContainerDied","Data":"3b636293b7cd0a98237276b9603db1f236f6378697c96d31f033c995c5aec2b6"} Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.452583 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rhzkk" event={"ID":"d185ca61-daf3-4a53-8604-c79c1546d8e5","Type":"ContainerStarted","Data":"698941ca5450256a6af618a507ef59ffa996ceead703471238334a58485aa5de"} Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.455101 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-57rps" Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.455112 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rf8w7" event={"ID":"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e","Type":"ContainerStarted","Data":"39394e082621b97880542a3f7bdd98c3e70f119252c72189dab79ebec56063d1"} Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.480686 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-mxmrl-config-rq2k9" podStartSLOduration=4.480667218 podStartE2EDuration="4.480667218s" podCreationTimestamp="2026-03-20 13:42:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:42:22.473482372 +0000 UTC m=+1188.763082315" watchObservedRunningTime="2026-03-20 13:42:22.480667218 +0000 UTC m=+1188.770267171" Mar 20 13:42:22 crc kubenswrapper[4690]: I0320 13:42:22.502470 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-rf8w7" podStartSLOduration=3.105518017 podStartE2EDuration="14.502452293s" podCreationTimestamp="2026-03-20 13:42:08 +0000 UTC" firstStartedPulling="2026-03-20 13:42:10.09534247 +0000 UTC m=+1176.384942413" lastFinishedPulling="2026-03-20 13:42:21.492276746 +0000 UTC m=+1187.781876689" observedRunningTime="2026-03-20 13:42:22.49327539 +0000 UTC m=+1188.782875353" watchObservedRunningTime="2026-03-20 13:42:22.502452293 +0000 UTC m=+1188.792052246" Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.467568 4690 generic.go:334] "Generic (PLEG): container finished" podID="c45f41c1-0e51-4534-acde-9c5ab2597126" containerID="46044e43cada2e61c44cca7fbbc48779ba086cc0491324f61348f1ca1ea54b4e" exitCode=0 Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.467615 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mxmrl-config-rq2k9" event={"ID":"c45f41c1-0e51-4534-acde-9c5ab2597126","Type":"ContainerDied","Data":"46044e43cada2e61c44cca7fbbc48779ba086cc0491324f61348f1ca1ea54b4e"} Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.471087 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"c00a1e911b3128060f55bca825b936daadc8c49835d2b8cdbdc4ad9d834d70a8"} Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.770288 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-mxmrl" Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.804435 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.930181 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d185ca61-daf3-4a53-8604-c79c1546d8e5-operator-scripts\") pod \"d185ca61-daf3-4a53-8604-c79c1546d8e5\" (UID: \"d185ca61-daf3-4a53-8604-c79c1546d8e5\") " Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.930316 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cls26\" (UniqueName: \"kubernetes.io/projected/d185ca61-daf3-4a53-8604-c79c1546d8e5-kube-api-access-cls26\") pod \"d185ca61-daf3-4a53-8604-c79c1546d8e5\" (UID: \"d185ca61-daf3-4a53-8604-c79c1546d8e5\") " Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.931759 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d185ca61-daf3-4a53-8604-c79c1546d8e5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d185ca61-daf3-4a53-8604-c79c1546d8e5" (UID: "d185ca61-daf3-4a53-8604-c79c1546d8e5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:23 crc kubenswrapper[4690]: I0320 13:42:23.939343 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d185ca61-daf3-4a53-8604-c79c1546d8e5-kube-api-access-cls26" (OuterVolumeSpecName: "kube-api-access-cls26") pod "d185ca61-daf3-4a53-8604-c79c1546d8e5" (UID: "d185ca61-daf3-4a53-8604-c79c1546d8e5"). InnerVolumeSpecName "kube-api-access-cls26". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.032413 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d185ca61-daf3-4a53-8604-c79c1546d8e5-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.032442 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cls26\" (UniqueName: \"kubernetes.io/projected/d185ca61-daf3-4a53-8604-c79c1546d8e5-kube-api-access-cls26\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.499025 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"7cbdf60d9095571a6255cfab1e3dc4142e9b76e301d11b77936757af6e337d4a"} Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.499422 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"aae130756f983cf226bc482b945a64368dce944775218bc3a316db8fba510a9b"} Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.499456 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"7feb5a3b886b4de4e4d672a03e5b62994d3b388dc8306bd025f86662b3683b9d"} Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.501614 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-rhzkk" event={"ID":"d185ca61-daf3-4a53-8604-c79c1546d8e5","Type":"ContainerDied","Data":"698941ca5450256a6af618a507ef59ffa996ceead703471238334a58485aa5de"} Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.501666 4690 
pod_container_deletor.go:80] "Container not found in pod's containers" containerID="698941ca5450256a6af618a507ef59ffa996ceead703471238334a58485aa5de" Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.501770 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-rhzkk" Mar 20 13:42:24 crc kubenswrapper[4690]: I0320 13:42:24.893126 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.051386 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-additional-scripts\") pod \"c45f41c1-0e51-4534-acde-9c5ab2597126\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.051920 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-log-ovn\") pod \"c45f41c1-0e51-4534-acde-9c5ab2597126\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052080 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run-ovn\") pod \"c45f41c1-0e51-4534-acde-9c5ab2597126\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052122 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x9d69\" (UniqueName: \"kubernetes.io/projected/c45f41c1-0e51-4534-acde-9c5ab2597126-kube-api-access-x9d69\") pod \"c45f41c1-0e51-4534-acde-9c5ab2597126\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052151 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-scripts\") pod \"c45f41c1-0e51-4534-acde-9c5ab2597126\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052228 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run\") pod \"c45f41c1-0e51-4534-acde-9c5ab2597126\" (UID: \"c45f41c1-0e51-4534-acde-9c5ab2597126\") " Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052384 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "c45f41c1-0e51-4534-acde-9c5ab2597126" (UID: "c45f41c1-0e51-4534-acde-9c5ab2597126"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052446 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run" (OuterVolumeSpecName: "var-run") pod "c45f41c1-0e51-4534-acde-9c5ab2597126" (UID: "c45f41c1-0e51-4534-acde-9c5ab2597126"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052477 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "c45f41c1-0e51-4534-acde-9c5ab2597126" (UID: "c45f41c1-0e51-4534-acde-9c5ab2597126"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052977 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "c45f41c1-0e51-4534-acde-9c5ab2597126" (UID: "c45f41c1-0e51-4534-acde-9c5ab2597126"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.052987 4690 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run-ovn\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.053047 4690 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-run\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.053293 4690 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/c45f41c1-0e51-4534-acde-9c5ab2597126-var-log-ovn\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.054490 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-scripts" (OuterVolumeSpecName: "scripts") pod "c45f41c1-0e51-4534-acde-9c5ab2597126" (UID: "c45f41c1-0e51-4534-acde-9c5ab2597126"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.060127 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c45f41c1-0e51-4534-acde-9c5ab2597126-kube-api-access-x9d69" (OuterVolumeSpecName: "kube-api-access-x9d69") pod "c45f41c1-0e51-4534-acde-9c5ab2597126" (UID: "c45f41c1-0e51-4534-acde-9c5ab2597126"). InnerVolumeSpecName "kube-api-access-x9d69". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.154692 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x9d69\" (UniqueName: \"kubernetes.io/projected/c45f41c1-0e51-4534-acde-9c5ab2597126-kube-api-access-x9d69\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.154740 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.154753 4690 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/c45f41c1-0e51-4534-acde-9c5ab2597126-additional-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.518274 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-mxmrl-config-rq2k9" event={"ID":"c45f41c1-0e51-4534-acde-9c5ab2597126","Type":"ContainerDied","Data":"a5042cf3393359ad6e09ad8a28e9d78691b628398a56e4487b29f3d172cac84f"} Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.518360 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a5042cf3393359ad6e09ad8a28e9d78691b628398a56e4487b29f3d172cac84f" Mar 20 13:42:25 crc kubenswrapper[4690]: I0320 13:42:25.518371 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-mxmrl-config-rq2k9" Mar 20 13:42:26 crc kubenswrapper[4690]: I0320 13:42:26.023826 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-mxmrl-config-rq2k9"] Mar 20 13:42:26 crc kubenswrapper[4690]: I0320 13:42:26.032664 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-mxmrl-config-rq2k9"] Mar 20 13:42:26 crc kubenswrapper[4690]: I0320 13:42:26.427826 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c45f41c1-0e51-4534-acde-9c5ab2597126" path="/var/lib/kubelet/pods/c45f41c1-0e51-4534-acde-9c5ab2597126/volumes" Mar 20 13:42:26 crc kubenswrapper[4690]: I0320 13:42:26.594586 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-rhzkk"] Mar 20 13:42:26 crc kubenswrapper[4690]: I0320 13:42:26.601405 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-rhzkk"] Mar 20 13:42:27 crc kubenswrapper[4690]: I0320 13:42:27.535758 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"c37d16d0a0dac8ebc450f41d931fecdd46e2cf62dd6972bfc2bcc0eccad6d9e8"} Mar 20 13:42:27 crc kubenswrapper[4690]: I0320 13:42:27.536118 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"8eaf3b2a6afc39d6dd81385ba6434ac37133b25b2ddcd3de8ceb4fa00cb1b745"} Mar 20 13:42:27 crc kubenswrapper[4690]: I0320 13:42:27.536135 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"9fb425ff1cdb2b8d09fd2cfed109cc0a5971c4b1acdb0099006cc9f545bc1717"} Mar 20 13:42:27 crc kubenswrapper[4690]: I0320 13:42:27.536173 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"8b43c8a4ac186bf45e259cce45406f5bb8da3f6b081aacdbf395e88c3137c19f"} Mar 20 13:42:28 crc kubenswrapper[4690]: I0320 13:42:28.428575 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d185ca61-daf3-4a53-8604-c79c1546d8e5" path="/var/lib/kubelet/pods/d185ca61-daf3-4a53-8604-c79c1546d8e5/volumes" Mar 20 13:42:30 crc kubenswrapper[4690]: I0320 13:42:30.569011 4690 generic.go:334] "Generic (PLEG): container finished" podID="e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" containerID="39394e082621b97880542a3f7bdd98c3e70f119252c72189dab79ebec56063d1" exitCode=0 Mar 20 13:42:30 crc kubenswrapper[4690]: I0320 13:42:30.569076 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rf8w7" event={"ID":"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e","Type":"ContainerDied","Data":"39394e082621b97880542a3f7bdd98c3e70f119252c72189dab79ebec56063d1"} Mar 20 13:42:30 crc kubenswrapper[4690]: I0320 13:42:30.577960 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"c7124a2e702058e9209d3d3d05d67252f1f5b8f4d3937915bbe21502bc4e0183"} Mar 20 13:42:30 crc kubenswrapper[4690]: I0320 13:42:30.578277 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"c469f8ec7c01ea2d118d2b8f143a0e93bbd07776d5fd5b7598516b71ebec6a3e"} Mar 20 13:42:30 crc kubenswrapper[4690]: I0320 13:42:30.578295 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"782874eda045a6884e8f094be1bc9de25aec43dfe278f7f7a6515c139adf812f"} Mar 20 13:42:30 crc kubenswrapper[4690]: I0320 13:42:30.578307 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"07af6a592ee84cdc7fc25afffee5d8437b40c88f606458bcbd791b729f08bbd8"} Mar 20 13:42:30 crc kubenswrapper[4690]: I0320 13:42:30.578319 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"53ef3d95ab4eaf25a89de617b92971b2282afdf2d88ddaf053fe719b830ff16f"} Mar 20 13:42:30 crc kubenswrapper[4690]: I0320 13:42:30.636382 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.649995 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/root-account-create-update-nxkjc"] Mar 20 13:42:31 crc kubenswrapper[4690]: E0320 13:42:31.659595 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b26393a-1d00-4b21-a3b2-74518b7f0b3d" containerName="swift-ring-rebalance" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.659990 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b26393a-1d00-4b21-a3b2-74518b7f0b3d" containerName="swift-ring-rebalance" Mar 20 13:42:31 crc kubenswrapper[4690]: E0320 13:42:31.660072 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d185ca61-daf3-4a53-8604-c79c1546d8e5" containerName="mariadb-account-create-update" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.660144 4690 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="d185ca61-daf3-4a53-8604-c79c1546d8e5" containerName="mariadb-account-create-update" Mar 20 13:42:31 crc kubenswrapper[4690]: E0320 13:42:31.660231 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c45f41c1-0e51-4534-acde-9c5ab2597126" containerName="ovn-config" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.660301 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c45f41c1-0e51-4534-acde-9c5ab2597126" containerName="ovn-config" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.660567 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b26393a-1d00-4b21-a3b2-74518b7f0b3d" containerName="swift-ring-rebalance" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.660650 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d185ca61-daf3-4a53-8604-c79c1546d8e5" containerName="mariadb-account-create-update" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.660732 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="c45f41c1-0e51-4534-acde-9c5ab2597126" containerName="ovn-config" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.661401 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"fee63ea2bdd32bbfcd1ed9d15a49d2fb268c14eef46db6b1f3c35a39f683d8cc"} Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.661520 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a","Type":"ContainerStarted","Data":"be3ef658515707ca244a6064876a6d9c0cc2675f59c8321c01887609472e0fa2"} Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.661676 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.665173 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-cell1-mariadb-root-db-secret" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.723752 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=24.154367087 podStartE2EDuration="31.723731848s" podCreationTimestamp="2026-03-20 13:42:00 +0000 UTC" firstStartedPulling="2026-03-20 13:42:21.995628529 +0000 UTC m=+1188.285228472" lastFinishedPulling="2026-03-20 13:42:29.56499329 +0000 UTC m=+1195.854593233" observedRunningTime="2026-03-20 13:42:31.702321603 +0000 UTC m=+1197.991921546" watchObservedRunningTime="2026-03-20 13:42:31.723731848 +0000 UTC m=+1198.013331791" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.724105 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-nxkjc"] Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.799370 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-operator-scripts\") pod \"root-account-create-update-nxkjc\" (UID: \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\") " pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.799567 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6trb\" (UniqueName: \"kubernetes.io/projected/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-kube-api-access-k6trb\") pod \"root-account-create-update-nxkjc\" (UID: \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\") " pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.901226 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6trb\" (UniqueName: \"kubernetes.io/projected/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-kube-api-access-k6trb\") pod \"root-account-create-update-nxkjc\" (UID: \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\") " pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.901307 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-operator-scripts\") pod \"root-account-create-update-nxkjc\" (UID: \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\") " pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.902268 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-operator-scripts\") pod \"root-account-create-update-nxkjc\" (UID: \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\") " pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:31 crc kubenswrapper[4690]: I0320 13:42:31.918606 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6trb\" (UniqueName: \"kubernetes.io/projected/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-kube-api-access-k6trb\") pod \"root-account-create-update-nxkjc\" (UID: \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\") " pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 
13:42:32.036127 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.110339 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-6skgs"] Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.112042 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.118814 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.139777 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-6skgs"] Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.148475 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.210204 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.210275 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.210309 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.210336 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-config\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.210435 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.210519 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pzwb\" (UniqueName: \"kubernetes.io/projected/8ceb0669-ecea-443a-885e-95af36d21d0a-kube-api-access-5pzwb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312023 4690 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-fcsjr\" (UniqueName: \"kubernetes.io/projected/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-kube-api-access-fcsjr\") pod \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312236 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-db-sync-config-data\") pod \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312276 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-combined-ca-bundle\") pod \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312337 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-config-data\") pod \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\" (UID: \"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e\") " Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312506 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312534 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312551 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-config\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312588 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312647 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pzwb\" (UniqueName: \"kubernetes.io/projected/8ceb0669-ecea-443a-885e-95af36d21d0a-kube-api-access-5pzwb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.312696 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-svc\") pod 
\"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.313569 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-sb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.313605 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-svc\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.313629 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-swift-storage-0\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.313635 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-nb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.315013 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-config\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.317543 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" (UID: "e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.318102 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-kube-api-access-fcsjr" (OuterVolumeSpecName: "kube-api-access-fcsjr") pod "e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" (UID: "e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e"). InnerVolumeSpecName "kube-api-access-fcsjr". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.329139 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pzwb\" (UniqueName: \"kubernetes.io/projected/8ceb0669-ecea-443a-885e-95af36d21d0a-kube-api-access-5pzwb\") pod \"dnsmasq-dns-5c79d794d7-6skgs\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.334394 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" (UID: "e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.358102 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-config-data" (OuterVolumeSpecName: "config-data") pod "e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" (UID: "e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.414866 4690 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.414900 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.414910 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.414918 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcsjr\" (UniqueName: \"kubernetes.io/projected/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e-kube-api-access-fcsjr\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.461587 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:32 crc kubenswrapper[4690]: W0320 13:42:32.505178 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ca2df3b_74d4_4d7f_907f_7893a816cc3a.slice/crio-8eaa8c2086b689029de4f6057a922ae4d4d4823aff149c92ce8472d9db190b61 WatchSource:0}: Error finding container 8eaa8c2086b689029de4f6057a922ae4d4d4823aff149c92ce8472d9db190b61: Status 404 returned error can't find the container with id 8eaa8c2086b689029de4f6057a922ae4d4d4823aff149c92ce8472d9db190b61 Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.507672 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/root-account-create-update-nxkjc"] Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.669084 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-nxkjc" event={"ID":"4ca2df3b-74d4-4d7f-907f-7893a816cc3a","Type":"ContainerStarted","Data":"8eaa8c2086b689029de4f6057a922ae4d4d4823aff149c92ce8472d9db190b61"} Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.670319 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-rf8w7" event={"ID":"e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e","Type":"ContainerDied","Data":"8d7019ff2e188180d617874aaf959fcb1479049503bf65b3a726e3c6d437aa1e"} Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.670333 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-rf8w7" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.670341 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d7019ff2e188180d617874aaf959fcb1479049503bf65b3a726e3c6d437aa1e" Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.672195 4690 generic.go:334] "Generic (PLEG): container finished" podID="c9508cc5-d6ca-435f-949a-790440ed5f11" containerID="32923691aec687dd148bd0f2913887a5203003bde19d6d7cce2ef3d71a4d0f58" exitCode=0 Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.672274 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c9508cc5-d6ca-435f-949a-790440ed5f11","Type":"ContainerDied","Data":"32923691aec687dd148bd0f2913887a5203003bde19d6d7cce2ef3d71a4d0f58"} Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.973435 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-6skgs"] Mar 20 13:42:32 crc kubenswrapper[4690]: I0320 13:42:32.994625 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-6skgs"] Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.015356 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-fdmgt"] Mar 20 13:42:33 crc kubenswrapper[4690]: E0320 13:42:33.015713 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" containerName="glance-db-sync" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.015729 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" containerName="glance-db-sync" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.015907 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" containerName="glance-db-sync" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.016643 4690 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.034455 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-fdmgt"] Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.136394 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.136814 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-config\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.136836 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.136898 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.136937 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g2d6\" (UniqueName: \"kubernetes.io/projected/9e320fee-cd78-4d19-b2ac-23dd935a0894-kube-api-access-8g2d6\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.136971 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.238980 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g2d6\" (UniqueName: \"kubernetes.io/projected/9e320fee-cd78-4d19-b2ac-23dd935a0894-kube-api-access-8g2d6\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.239060 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.239133 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.239220 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-config\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.239244 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.239304 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.240288 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-nb\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.240806 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-sb\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.240898 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-swift-storage-0\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.241291 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-config\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.241453 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-svc\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.257708 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g2d6\" (UniqueName: 
\"kubernetes.io/projected/9e320fee-cd78-4d19-b2ac-23dd935a0894-kube-api-access-8g2d6\") pod \"dnsmasq-dns-5f59b8f679-fdmgt\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.501198 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.686046 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c9508cc5-d6ca-435f-949a-790440ed5f11","Type":"ContainerStarted","Data":"8467ee829c19ecae54ebac2c9bf202169b53b7850eb7bdac7a064695e25946f0"} Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.686597 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.688041 4690 generic.go:334] "Generic (PLEG): container finished" podID="4ca2df3b-74d4-4d7f-907f-7893a816cc3a" containerID="bef8e245fb0edee4e91198217bb3b3de03a43166943d42f2a03766f3ea36ca06" exitCode=0 Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.688108 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-nxkjc" event={"ID":"4ca2df3b-74d4-4d7f-907f-7893a816cc3a","Type":"ContainerDied","Data":"bef8e245fb0edee4e91198217bb3b3de03a43166943d42f2a03766f3ea36ca06"} Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.689552 4690 generic.go:334] "Generic (PLEG): container finished" podID="b6c3ab56-9d3c-431c-a697-d6df19b67a21" containerID="4a558a53d5a7c50e845544545240ecc9aa85af75ed3990048851148c43c08581" exitCode=0 Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.689603 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b6c3ab56-9d3c-431c-a697-d6df19b67a21","Type":"ContainerDied","Data":"4a558a53d5a7c50e845544545240ecc9aa85af75ed3990048851148c43c08581"} Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.694532 4690 generic.go:334] "Generic (PLEG): container finished" podID="8ceb0669-ecea-443a-885e-95af36d21d0a" containerID="d304408c596052a8093231324c309ff6fd5ddb1bb73cd288eb9a2a5c08001e63" exitCode=0 Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.694734 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" event={"ID":"8ceb0669-ecea-443a-885e-95af36d21d0a","Type":"ContainerDied","Data":"d304408c596052a8093231324c309ff6fd5ddb1bb73cd288eb9a2a5c08001e63"} Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.694758 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" event={"ID":"8ceb0669-ecea-443a-885e-95af36d21d0a","Type":"ContainerStarted","Data":"4b4d576efdc940f07464398a7973c7283ad512b68cbb8ce6d578f31ed52a3f4e"} Mar 20 13:42:33 crc kubenswrapper[4690]: I0320 13:42:33.719709 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=52.750485652 podStartE2EDuration="1m20.719691723s" podCreationTimestamp="2026-03-20 13:41:13 +0000 UTC" firstStartedPulling="2026-03-20 13:41:26.885180606 +0000 UTC m=+1133.174780549" lastFinishedPulling="2026-03-20 13:41:54.854386677 +0000 UTC m=+1161.143986620" observedRunningTime="2026-03-20 13:42:33.713735712 +0000 UTC m=+1200.003335655" watchObservedRunningTime="2026-03-20 13:42:33.719691723 +0000 UTC m=+1200.009291666" Mar 20 13:42:34 crc kubenswrapper[4690]: 
I0320 13:42:34.031163 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-fdmgt"] Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.266114 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.356471 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-nb\") pod \"8ceb0669-ecea-443a-885e-95af36d21d0a\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.356541 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-svc\") pod \"8ceb0669-ecea-443a-885e-95af36d21d0a\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.356617 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-config\") pod \"8ceb0669-ecea-443a-885e-95af36d21d0a\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.356652 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pzwb\" (UniqueName: \"kubernetes.io/projected/8ceb0669-ecea-443a-885e-95af36d21d0a-kube-api-access-5pzwb\") pod \"8ceb0669-ecea-443a-885e-95af36d21d0a\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.356691 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-sb\") pod \"8ceb0669-ecea-443a-885e-95af36d21d0a\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.356763 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-swift-storage-0\") pod \"8ceb0669-ecea-443a-885e-95af36d21d0a\" (UID: \"8ceb0669-ecea-443a-885e-95af36d21d0a\") " Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.362652 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ceb0669-ecea-443a-885e-95af36d21d0a-kube-api-access-5pzwb" (OuterVolumeSpecName: "kube-api-access-5pzwb") pod "8ceb0669-ecea-443a-885e-95af36d21d0a" (UID: "8ceb0669-ecea-443a-885e-95af36d21d0a"). InnerVolumeSpecName "kube-api-access-5pzwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.377902 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "8ceb0669-ecea-443a-885e-95af36d21d0a" (UID: "8ceb0669-ecea-443a-885e-95af36d21d0a"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.383208 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "8ceb0669-ecea-443a-885e-95af36d21d0a" (UID: "8ceb0669-ecea-443a-885e-95af36d21d0a"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.389374 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "8ceb0669-ecea-443a-885e-95af36d21d0a" (UID: "8ceb0669-ecea-443a-885e-95af36d21d0a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.389427 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "8ceb0669-ecea-443a-885e-95af36d21d0a" (UID: "8ceb0669-ecea-443a-885e-95af36d21d0a"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.398121 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-config" (OuterVolumeSpecName: "config") pod "8ceb0669-ecea-443a-885e-95af36d21d0a" (UID: "8ceb0669-ecea-443a-885e-95af36d21d0a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.459715 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.459761 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pzwb\" (UniqueName: \"kubernetes.io/projected/8ceb0669-ecea-443a-885e-95af36d21d0a-kube-api-access-5pzwb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.459775 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.459788 4690 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.459797 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.459808 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/8ceb0669-ecea-443a-885e-95af36d21d0a-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.706679 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b6c3ab56-9d3c-431c-a697-d6df19b67a21","Type":"ContainerStarted","Data":"5141b0d33d8a9bf8e59cdd9b8121ee5852c480971d3c43d776b2a492efdc273d"} Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.707740 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.711456 4690 generic.go:334] "Generic (PLEG): container finished" podID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerID="a664555c062840e44661f8ab5f0afa2c9f8b795f7a48ba1480cecbe44fc15405" exitCode=0 Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.711527 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" event={"ID":"9e320fee-cd78-4d19-b2ac-23dd935a0894","Type":"ContainerDied","Data":"a664555c062840e44661f8ab5f0afa2c9f8b795f7a48ba1480cecbe44fc15405"} Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.711552 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" event={"ID":"9e320fee-cd78-4d19-b2ac-23dd935a0894","Type":"ContainerStarted","Data":"8022aea8874a090584de67eca1a937125334d4d6970da8e98a835da26f02275f"} Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.716743 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.716983 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c79d794d7-6skgs" event={"ID":"8ceb0669-ecea-443a-885e-95af36d21d0a","Type":"ContainerDied","Data":"4b4d576efdc940f07464398a7973c7283ad512b68cbb8ce6d578f31ed52a3f4e"} Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.717016 4690 scope.go:117] "RemoveContainer" containerID="d304408c596052a8093231324c309ff6fd5ddb1bb73cd288eb9a2a5c08001e63" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.742999 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371955.111809 podStartE2EDuration="1m21.742966087s" podCreationTimestamp="2026-03-20 13:41:13 +0000 UTC" firstStartedPulling="2026-03-20 13:41:27.545640038 +0000 UTC m=+1133.835239981" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:42:34.734889346 +0000 UTC m=+1201.024489299" watchObservedRunningTime="2026-03-20 13:42:34.742966087 +0000 UTC m=+1201.032566030" Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.843483 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-6skgs"] Mar 20 13:42:34 crc kubenswrapper[4690]: I0320 13:42:34.875887 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c79d794d7-6skgs"] Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.182098 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.283619 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-operator-scripts\") pod \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\" (UID: \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\") " Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.284170 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6trb\" (UniqueName: \"kubernetes.io/projected/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-kube-api-access-k6trb\") pod \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\" (UID: \"4ca2df3b-74d4-4d7f-907f-7893a816cc3a\") " Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.284347 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4ca2df3b-74d4-4d7f-907f-7893a816cc3a" (UID: "4ca2df3b-74d4-4d7f-907f-7893a816cc3a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.284586 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.297093 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-kube-api-access-k6trb" (OuterVolumeSpecName: "kube-api-access-k6trb") pod "4ca2df3b-74d4-4d7f-907f-7893a816cc3a" (UID: "4ca2df3b-74d4-4d7f-907f-7893a816cc3a"). InnerVolumeSpecName "kube-api-access-k6trb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.386278 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6trb\" (UniqueName: \"kubernetes.io/projected/4ca2df3b-74d4-4d7f-907f-7893a816cc3a-kube-api-access-k6trb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.727193 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" event={"ID":"9e320fee-cd78-4d19-b2ac-23dd935a0894","Type":"ContainerStarted","Data":"b609a331b6c8b2fcb43e5bb73f626c651e6f9c616a53f06abf6ef6b7cd424c9e"} Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.728253 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.731484 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/root-account-create-update-nxkjc" Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.734598 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/root-account-create-update-nxkjc" event={"ID":"4ca2df3b-74d4-4d7f-907f-7893a816cc3a","Type":"ContainerDied","Data":"8eaa8c2086b689029de4f6057a922ae4d4d4823aff149c92ce8472d9db190b61"} Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.734657 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8eaa8c2086b689029de4f6057a922ae4d4d4823aff149c92ce8472d9db190b61" Mar 20 13:42:35 crc kubenswrapper[4690]: I0320 13:42:35.758876 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" podStartSLOduration=3.758829249 podStartE2EDuration="3.758829249s" podCreationTimestamp="2026-03-20 13:42:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:42:35.752208749 +0000 UTC m=+1202.041808702" watchObservedRunningTime="2026-03-20 13:42:35.758829249 +0000 UTC m=+1202.048429212" Mar 20 13:42:36 crc kubenswrapper[4690]: I0320 13:42:36.424278 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ceb0669-ecea-443a-885e-95af36d21d0a" path="/var/lib/kubelet/pods/8ceb0669-ecea-443a-885e-95af36d21d0a/volumes" Mar 20 13:42:43 crc kubenswrapper[4690]: I0320 13:42:43.503061 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:42:43 crc kubenswrapper[4690]: I0320 13:42:43.573477 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-2cfpf"] Mar 20 13:42:43 crc kubenswrapper[4690]: I0320 13:42:43.574014 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" podUID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" containerName="dnsmasq-dns" containerID="cri-o://8769f13c1a88f9d95d1f94ce081df243e151b4a351b2e7701b1afb329da51bee" gracePeriod=10 Mar 20 13:42:43 crc kubenswrapper[4690]: I0320 13:42:43.807308 4690 generic.go:334] "Generic (PLEG): container finished" podID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" containerID="8769f13c1a88f9d95d1f94ce081df243e151b4a351b2e7701b1afb329da51bee" exitCode=0 Mar 20 13:42:43 crc kubenswrapper[4690]: I0320 13:42:43.807349 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" event={"ID":"a05f19fb-0bd7-418d-bf6d-1dec04ed9529","Type":"ContainerDied","Data":"8769f13c1a88f9d95d1f94ce081df243e151b4a351b2e7701b1afb329da51bee"} Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.112895 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.273673 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-config\") pod \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.273726 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-sb\") pod \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.273885 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvprh\" (UniqueName: \"kubernetes.io/projected/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-kube-api-access-xvprh\") pod \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.273934 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-nb\") pod \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.273957 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-dns-svc\") pod \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\" (UID: \"a05f19fb-0bd7-418d-bf6d-1dec04ed9529\") " Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.291100 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-kube-api-access-xvprh" (OuterVolumeSpecName: "kube-api-access-xvprh") pod "a05f19fb-0bd7-418d-bf6d-1dec04ed9529" (UID: "a05f19fb-0bd7-418d-bf6d-1dec04ed9529"). InnerVolumeSpecName "kube-api-access-xvprh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.322123 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a05f19fb-0bd7-418d-bf6d-1dec04ed9529" (UID: "a05f19fb-0bd7-418d-bf6d-1dec04ed9529"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.324330 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a05f19fb-0bd7-418d-bf6d-1dec04ed9529" (UID: "a05f19fb-0bd7-418d-bf6d-1dec04ed9529"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.329494 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a05f19fb-0bd7-418d-bf6d-1dec04ed9529" (UID: "a05f19fb-0bd7-418d-bf6d-1dec04ed9529"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.344424 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-config" (OuterVolumeSpecName: "config") pod "a05f19fb-0bd7-418d-bf6d-1dec04ed9529" (UID: "a05f19fb-0bd7-418d-bf6d-1dec04ed9529"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.375647 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.375680 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.375696 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvprh\" (UniqueName: \"kubernetes.io/projected/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-kube-api-access-xvprh\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.375707 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.375717 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a05f19fb-0bd7-418d-bf6d-1dec04ed9529-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.513023 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.757019 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.815871 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" event={"ID":"a05f19fb-0bd7-418d-bf6d-1dec04ed9529","Type":"ContainerDied","Data":"c8f281fb1d4e57267455ab0a80d77698e56c24632aaa4ca4cfb12db00f950240"} Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.815924 4690 scope.go:117] "RemoveContainer" containerID="8769f13c1a88f9d95d1f94ce081df243e151b4a351b2e7701b1afb329da51bee" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.815933 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-2cfpf" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.854791 4690 scope.go:117] "RemoveContainer" containerID="8dc8d7a1885f0b543a5fbd2aa653f921c59ee6a7c27ec1799c8730f9450f2d18" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.863905 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-hw7cv"] Mar 20 13:42:44 crc kubenswrapper[4690]: E0320 13:42:44.864268 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" containerName="init" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.864281 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" containerName="init" Mar 20 13:42:44 crc kubenswrapper[4690]: E0320 13:42:44.864302 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" containerName="dnsmasq-dns" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.864308 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" containerName="dnsmasq-dns" Mar 20 13:42:44 crc kubenswrapper[4690]: E0320 13:42:44.864317 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ca2df3b-74d4-4d7f-907f-7893a816cc3a" containerName="mariadb-account-create-update" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.864326 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ca2df3b-74d4-4d7f-907f-7893a816cc3a" containerName="mariadb-account-create-update" Mar 20 13:42:44 crc kubenswrapper[4690]: E0320 13:42:44.864335 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ceb0669-ecea-443a-885e-95af36d21d0a" containerName="init" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.864340 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ceb0669-ecea-443a-885e-95af36d21d0a" containerName="init" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.864473 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" containerName="dnsmasq-dns" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.864484 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ca2df3b-74d4-4d7f-907f-7893a816cc3a" containerName="mariadb-account-create-update" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.864497 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ceb0669-ecea-443a-885e-95af36d21d0a" containerName="init" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.865005 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.899919 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-2cfpf"] Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.918570 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-2cfpf"] Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.928400 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-hw7cv"] Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.973894 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-9a17-account-create-update-jcbrv"] Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.974865 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.978206 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-9a17-account-create-update-jcbrv"] Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.980185 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.992275 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94049a0c-7da4-43be-8e15-36e9a282f728-operator-scripts\") pod \"cinder-db-create-hw7cv\" (UID: \"94049a0c-7da4-43be-8e15-36e9a282f728\") " pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:44 crc kubenswrapper[4690]: I0320 13:42:44.992421 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcfnv\" (UniqueName: \"kubernetes.io/projected/94049a0c-7da4-43be-8e15-36e9a282f728-kube-api-access-hcfnv\") pod \"cinder-db-create-hw7cv\" (UID: \"94049a0c-7da4-43be-8e15-36e9a282f728\") " pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.036345 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-fhxfq"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.037276 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.044728 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-fhxfq"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.095538 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pc498\" (UniqueName: \"kubernetes.io/projected/519b3e37-0f94-4018-97e2-7c7b0b99df0d-kube-api-access-pc498\") pod \"cinder-9a17-account-create-update-jcbrv\" (UID: \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\") " pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.095597 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94049a0c-7da4-43be-8e15-36e9a282f728-operator-scripts\") pod \"cinder-db-create-hw7cv\" (UID: \"94049a0c-7da4-43be-8e15-36e9a282f728\") " pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.095650 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519b3e37-0f94-4018-97e2-7c7b0b99df0d-operator-scripts\") pod \"cinder-9a17-account-create-update-jcbrv\" (UID: \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\") " pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.095678 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcfnv\" (UniqueName: \"kubernetes.io/projected/94049a0c-7da4-43be-8e15-36e9a282f728-kube-api-access-hcfnv\") pod \"cinder-db-create-hw7cv\" (UID: \"94049a0c-7da4-43be-8e15-36e9a282f728\") " pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.096379 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/94049a0c-7da4-43be-8e15-36e9a282f728-operator-scripts\") pod \"cinder-db-create-hw7cv\" (UID: \"94049a0c-7da4-43be-8e15-36e9a282f728\") " pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.124579 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcfnv\" (UniqueName: \"kubernetes.io/projected/94049a0c-7da4-43be-8e15-36e9a282f728-kube-api-access-hcfnv\") pod \"cinder-db-create-hw7cv\" (UID: \"94049a0c-7da4-43be-8e15-36e9a282f728\") " pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.132716 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-78ee-account-create-update-88b9m"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.135655 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.137965 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.142346 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-78ee-account-create-update-88b9m"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.199786 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519b3e37-0f94-4018-97e2-7c7b0b99df0d-operator-scripts\") pod \"cinder-9a17-account-create-update-jcbrv\" (UID: \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\") " pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.199964 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-operator-scripts\") pod \"barbican-db-create-fhxfq\" (UID: \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\") " pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.200112 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tmpd\" (UniqueName: \"kubernetes.io/projected/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-kube-api-access-9tmpd\") pod \"barbican-db-create-fhxfq\" (UID: \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\") " pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.200167 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pc498\" (UniqueName: \"kubernetes.io/projected/519b3e37-0f94-4018-97e2-7c7b0b99df0d-kube-api-access-pc498\") pod \"cinder-9a17-account-create-update-jcbrv\" (UID: \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\") " pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.201372 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519b3e37-0f94-4018-97e2-7c7b0b99df0d-operator-scripts\") pod \"cinder-9a17-account-create-update-jcbrv\" (UID: \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\") " pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.222751 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.226472 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-gblwq"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.227519 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.234453 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-be37-account-create-update-2w6zx"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.235690 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pc498\" (UniqueName: \"kubernetes.io/projected/519b3e37-0f94-4018-97e2-7c7b0b99df0d-kube-api-access-pc498\") pod \"cinder-9a17-account-create-update-jcbrv\" (UID: \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\") " pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.236652 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.240501 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.250548 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-gblwq"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.256228 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-be37-account-create-update-2w6zx"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.290922 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-r8lpw"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.291915 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.295284 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.296769 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-r8lpw"] Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.299356 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cwsn8" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.299574 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.299731 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.299865 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.301014 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-operator-scripts\") pod \"neutron-db-create-gblwq\" (UID: \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\") " pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.301045 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2wf8\" (UniqueName: \"kubernetes.io/projected/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-kube-api-access-d2wf8\") pod \"neutron-db-create-gblwq\" (UID: \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\") " pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.301092 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02be9901-f882-45b3-8d1e-9105f2551417-operator-scripts\") pod \"barbican-78ee-account-create-update-88b9m\" (UID: \"02be9901-f882-45b3-8d1e-9105f2551417\") " pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.301129 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-operator-scripts\") pod \"barbican-db-create-fhxfq\" (UID: \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\") " pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.301165 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89k9g\" (UniqueName: \"kubernetes.io/projected/02be9901-f882-45b3-8d1e-9105f2551417-kube-api-access-89k9g\") pod \"barbican-78ee-account-create-update-88b9m\" (UID: \"02be9901-f882-45b3-8d1e-9105f2551417\") " pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.302055 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tmpd\" (UniqueName: \"kubernetes.io/projected/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-kube-api-access-9tmpd\") pod \"barbican-db-create-fhxfq\" (UID: \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\") " pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.302536 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-operator-scripts\") pod \"barbican-db-create-fhxfq\" (UID: \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\") " pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.323742 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tmpd\" (UniqueName: \"kubernetes.io/projected/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-kube-api-access-9tmpd\") pod \"barbican-db-create-fhxfq\" (UID: \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\") " pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.358584 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413134 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89k9g\" (UniqueName: \"kubernetes.io/projected/02be9901-f882-45b3-8d1e-9105f2551417-kube-api-access-89k9g\") pod \"barbican-78ee-account-create-update-88b9m\" (UID: \"02be9901-f882-45b3-8d1e-9105f2551417\") " pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413194 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2d455\" (UniqueName: \"kubernetes.io/projected/b4fcecc7-f191-472f-abcc-d886648e5ecc-kube-api-access-2d455\") pod \"neutron-be37-account-create-update-2w6zx\" (UID: \"b4fcecc7-f191-472f-abcc-d886648e5ecc\") " pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413220 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ndmd\" (UniqueName: \"kubernetes.io/projected/75808517-3db4-41a1-ac99-99324152c26d-kube-api-access-6ndmd\") pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413273 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-operator-scripts\") pod \"neutron-db-create-gblwq\" (UID: \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\") " pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413364 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2wf8\" (UniqueName: \"kubernetes.io/projected/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-kube-api-access-d2wf8\") pod \"neutron-db-create-gblwq\" (UID: \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\") " pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413433 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4fcecc7-f191-472f-abcc-d886648e5ecc-operator-scripts\") pod \"neutron-be37-account-create-update-2w6zx\" (UID: \"b4fcecc7-f191-472f-abcc-d886648e5ecc\") " pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413465 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-combined-ca-bundle\") 
pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413564 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02be9901-f882-45b3-8d1e-9105f2551417-operator-scripts\") pod \"barbican-78ee-account-create-update-88b9m\" (UID: \"02be9901-f882-45b3-8d1e-9105f2551417\") " pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.413748 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-config-data\") pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.414506 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02be9901-f882-45b3-8d1e-9105f2551417-operator-scripts\") pod \"barbican-78ee-account-create-update-88b9m\" (UID: \"02be9901-f882-45b3-8d1e-9105f2551417\") " pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.417119 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-operator-scripts\") pod \"neutron-db-create-gblwq\" (UID: \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\") " pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.428639 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2wf8\" (UniqueName: \"kubernetes.io/projected/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-kube-api-access-d2wf8\") pod \"neutron-db-create-gblwq\" (UID: \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\") " pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.431640 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89k9g\" (UniqueName: \"kubernetes.io/projected/02be9901-f882-45b3-8d1e-9105f2551417-kube-api-access-89k9g\") pod \"barbican-78ee-account-create-update-88b9m\" (UID: \"02be9901-f882-45b3-8d1e-9105f2551417\") " pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.472293 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.516765 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-config-data\") pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.516826 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2d455\" (UniqueName: \"kubernetes.io/projected/b4fcecc7-f191-472f-abcc-d886648e5ecc-kube-api-access-2d455\") pod \"neutron-be37-account-create-update-2w6zx\" (UID: \"b4fcecc7-f191-472f-abcc-d886648e5ecc\") " pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.516842 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ndmd\" (UniqueName: \"kubernetes.io/projected/75808517-3db4-41a1-ac99-99324152c26d-kube-api-access-6ndmd\") pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.522278 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4fcecc7-f191-472f-abcc-d886648e5ecc-operator-scripts\") pod \"neutron-be37-account-create-update-2w6zx\" (UID: \"b4fcecc7-f191-472f-abcc-d886648e5ecc\") " pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.522339 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-combined-ca-bundle\") pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.523226 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4fcecc7-f191-472f-abcc-d886648e5ecc-operator-scripts\") pod \"neutron-be37-account-create-update-2w6zx\" (UID: \"b4fcecc7-f191-472f-abcc-d886648e5ecc\") " pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.526860 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-config-data\") pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.527547 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-combined-ca-bundle\") pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.531788 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ndmd\" (UniqueName: \"kubernetes.io/projected/75808517-3db4-41a1-ac99-99324152c26d-kube-api-access-6ndmd\") pod \"keystone-db-sync-r8lpw\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " 
pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.537302 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2d455\" (UniqueName: \"kubernetes.io/projected/b4fcecc7-f191-472f-abcc-d886648e5ecc-kube-api-access-2d455\") pod \"neutron-be37-account-create-update-2w6zx\" (UID: \"b4fcecc7-f191-472f-abcc-d886648e5ecc\") " pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.589337 4690 scope.go:117] "RemoveContainer" containerID="6186b1025faebc9e90fb5ca0f0e37acaca84549a637abaa45ddd86af1f503d2e" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.640339 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.657759 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.664375 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:45 crc kubenswrapper[4690]: I0320 13:42:45.710101 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-hw7cv"] Mar 20 13:42:45 crc kubenswrapper[4690]: W0320 13:42:45.722970 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94049a0c_7da4_43be_8e15_36e9a282f728.slice/crio-30bcc04bc9c688e6eaa2547767051ddeabdaf8c1a5457260e0b3a1a1f1844ff9 WatchSource:0}: Error finding container 30bcc04bc9c688e6eaa2547767051ddeabdaf8c1a5457260e0b3a1a1f1844ff9: Status 404 returned error can't find the container with id 30bcc04bc9c688e6eaa2547767051ddeabdaf8c1a5457260e0b3a1a1f1844ff9 Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:45.825217 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-9a17-account-create-update-jcbrv"] Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:45.873042 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-fhxfq"] Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:45.886908 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-hw7cv" event={"ID":"94049a0c-7da4-43be-8e15-36e9a282f728","Type":"ContainerStarted","Data":"30bcc04bc9c688e6eaa2547767051ddeabdaf8c1a5457260e0b3a1a1f1844ff9"} Mar 20 13:42:46 crc kubenswrapper[4690]: W0320 13:42:45.911536 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0054dac3_92d7_4c60_8d3e_a4b0f4b48dfa.slice/crio-86866559d1e409a67b579109ade0f58fefc025cc7add3b3bddd1c38b14a916b3 WatchSource:0}: Error finding container 86866559d1e409a67b579109ade0f58fefc025cc7add3b3bddd1c38b14a916b3: Status 404 returned error can't find the container with id 86866559d1e409a67b579109ade0f58fefc025cc7add3b3bddd1c38b14a916b3 Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:45.973426 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-78ee-account-create-update-88b9m"] Mar 20 13:42:46 crc kubenswrapper[4690]: W0320 13:42:45.981611 4690 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02be9901_f882_45b3_8d1e_9105f2551417.slice/crio-cb261ba017e345e56d4848d917288a496350a5f0da359381839b972dc24e0318 WatchSource:0}: Error finding container cb261ba017e345e56d4848d917288a496350a5f0da359381839b972dc24e0318: Status 404 returned error can't find the container with id cb261ba017e345e56d4848d917288a496350a5f0da359381839b972dc24e0318 Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.427402 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a05f19fb-0bd7-418d-bf6d-1dec04ed9529" path="/var/lib/kubelet/pods/a05f19fb-0bd7-418d-bf6d-1dec04ed9529/volumes" Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.859301 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-be37-account-create-update-2w6zx"] Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.871974 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-r8lpw"] Mar 20 13:42:46 crc kubenswrapper[4690]: W0320 13:42:46.874949 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75808517_3db4_41a1_ac99_99324152c26d.slice/crio-3379e20c366fa09bd271e8531331b7763289e6408bc47bb17dd45e29ba25d3f8 WatchSource:0}: Error finding container 3379e20c366fa09bd271e8531331b7763289e6408bc47bb17dd45e29ba25d3f8: Status 404 returned error can't find the container with id 3379e20c366fa09bd271e8531331b7763289e6408bc47bb17dd45e29ba25d3f8 Mar 20 13:42:46 crc kubenswrapper[4690]: W0320 13:42:46.877357 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb4fcecc7_f191_472f_abcc_d886648e5ecc.slice/crio-eab02a84f314b5bc22ccefea91ffc5810adb97a86b163b6da4437771360783ed WatchSource:0}: Error finding container eab02a84f314b5bc22ccefea91ffc5810adb97a86b163b6da4437771360783ed: Status 404 returned error can't find the container with id eab02a84f314b5bc22ccefea91ffc5810adb97a86b163b6da4437771360783ed Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.892374 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-gblwq"] Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.901582 4690 generic.go:334] "Generic (PLEG): container finished" podID="519b3e37-0f94-4018-97e2-7c7b0b99df0d" containerID="329d01534bfd7a81220169a396b9bed1e88369dc534b29f8fa99af55ce622761" exitCode=0 Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.901635 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9a17-account-create-update-jcbrv" event={"ID":"519b3e37-0f94-4018-97e2-7c7b0b99df0d","Type":"ContainerDied","Data":"329d01534bfd7a81220169a396b9bed1e88369dc534b29f8fa99af55ce622761"} Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.901661 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9a17-account-create-update-jcbrv" event={"ID":"519b3e37-0f94-4018-97e2-7c7b0b99df0d","Type":"ContainerStarted","Data":"4c6277f89e6f41f356c2025cefc86e56a48d15d324a68cdc8807417b36909c58"} Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.903200 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-r8lpw" event={"ID":"75808517-3db4-41a1-ac99-99324152c26d","Type":"ContainerStarted","Data":"3379e20c366fa09bd271e8531331b7763289e6408bc47bb17dd45e29ba25d3f8"} Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.904572 4690 generic.go:334] "Generic (PLEG): 
container finished" podID="02be9901-f882-45b3-8d1e-9105f2551417" containerID="e1544a1675cfd2b2cfb02566259ee5a3b716d38ea4bc17cf23bfb782bdc54bdc" exitCode=0 Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.904615 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-78ee-account-create-update-88b9m" event={"ID":"02be9901-f882-45b3-8d1e-9105f2551417","Type":"ContainerDied","Data":"e1544a1675cfd2b2cfb02566259ee5a3b716d38ea4bc17cf23bfb782bdc54bdc"} Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.904628 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-78ee-account-create-update-88b9m" event={"ID":"02be9901-f882-45b3-8d1e-9105f2551417","Type":"ContainerStarted","Data":"cb261ba017e345e56d4848d917288a496350a5f0da359381839b972dc24e0318"} Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.905708 4690 generic.go:334] "Generic (PLEG): container finished" podID="0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa" containerID="4c1262cac50f850443ac0784133a3bec13148564576f72fd8bd86dd7939ee867" exitCode=0 Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.905738 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fhxfq" event={"ID":"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa","Type":"ContainerDied","Data":"4c1262cac50f850443ac0784133a3bec13148564576f72fd8bd86dd7939ee867"} Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.905751 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fhxfq" event={"ID":"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa","Type":"ContainerStarted","Data":"86866559d1e409a67b579109ade0f58fefc025cc7add3b3bddd1c38b14a916b3"} Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.911139 4690 generic.go:334] "Generic (PLEG): container finished" podID="94049a0c-7da4-43be-8e15-36e9a282f728" containerID="cdc32408e28f9ccb44f848d88c919534fcaa88503c1d8e4ed1faeffa24eef7b6" exitCode=0 Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.911217 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-hw7cv" event={"ID":"94049a0c-7da4-43be-8e15-36e9a282f728","Type":"ContainerDied","Data":"cdc32408e28f9ccb44f848d88c919534fcaa88503c1d8e4ed1faeffa24eef7b6"} Mar 20 13:42:46 crc kubenswrapper[4690]: I0320 13:42:46.912977 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-be37-account-create-update-2w6zx" event={"ID":"b4fcecc7-f191-472f-abcc-d886648e5ecc","Type":"ContainerStarted","Data":"eab02a84f314b5bc22ccefea91ffc5810adb97a86b163b6da4437771360783ed"} Mar 20 13:42:47 crc kubenswrapper[4690]: I0320 13:42:47.922008 4690 generic.go:334] "Generic (PLEG): container finished" podID="b4fcecc7-f191-472f-abcc-d886648e5ecc" containerID="8b11fa790332f407aebacf84b9bf8b75c7146e07a70f286809d47f6bf74acded" exitCode=0 Mar 20 13:42:47 crc kubenswrapper[4690]: I0320 13:42:47.922309 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-be37-account-create-update-2w6zx" event={"ID":"b4fcecc7-f191-472f-abcc-d886648e5ecc","Type":"ContainerDied","Data":"8b11fa790332f407aebacf84b9bf8b75c7146e07a70f286809d47f6bf74acded"} Mar 20 13:42:47 crc kubenswrapper[4690]: I0320 13:42:47.923908 4690 generic.go:334] "Generic (PLEG): container finished" podID="49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9" containerID="a6c88d86c601e34db86bc4703a7d5c89bae18abe3c3068f5ed652fe9d4520297" exitCode=0 Mar 20 13:42:47 crc kubenswrapper[4690]: I0320 13:42:47.924057 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/neutron-db-create-gblwq" event={"ID":"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9","Type":"ContainerDied","Data":"a6c88d86c601e34db86bc4703a7d5c89bae18abe3c3068f5ed652fe9d4520297"} Mar 20 13:42:47 crc kubenswrapper[4690]: I0320 13:42:47.924078 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-gblwq" event={"ID":"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9","Type":"ContainerStarted","Data":"b04a172630557e5a6d659bf334c8e932f7b2d51f4781e275b78caca0842895b8"} Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.330719 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.469405 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-operator-scripts\") pod \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\" (UID: \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\") " Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.469653 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tmpd\" (UniqueName: \"kubernetes.io/projected/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-kube-api-access-9tmpd\") pod \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\" (UID: \"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa\") " Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.471325 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa" (UID: "0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.476513 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-kube-api-access-9tmpd" (OuterVolumeSpecName: "kube-api-access-9tmpd") pod "0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa" (UID: "0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa"). InnerVolumeSpecName "kube-api-access-9tmpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.530076 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.535709 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.553055 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.572082 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.572107 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tmpd\" (UniqueName: \"kubernetes.io/projected/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa-kube-api-access-9tmpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673121 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02be9901-f882-45b3-8d1e-9105f2551417-operator-scripts\") pod \"02be9901-f882-45b3-8d1e-9105f2551417\" (UID: \"02be9901-f882-45b3-8d1e-9105f2551417\") " Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673288 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-89k9g\" (UniqueName: \"kubernetes.io/projected/02be9901-f882-45b3-8d1e-9105f2551417-kube-api-access-89k9g\") pod \"02be9901-f882-45b3-8d1e-9105f2551417\" (UID: \"02be9901-f882-45b3-8d1e-9105f2551417\") " Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673356 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pc498\" (UniqueName: \"kubernetes.io/projected/519b3e37-0f94-4018-97e2-7c7b0b99df0d-kube-api-access-pc498\") pod \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\" (UID: \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\") " Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673400 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcfnv\" (UniqueName: \"kubernetes.io/projected/94049a0c-7da4-43be-8e15-36e9a282f728-kube-api-access-hcfnv\") pod \"94049a0c-7da4-43be-8e15-36e9a282f728\" (UID: \"94049a0c-7da4-43be-8e15-36e9a282f728\") " Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673437 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519b3e37-0f94-4018-97e2-7c7b0b99df0d-operator-scripts\") pod \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\" (UID: \"519b3e37-0f94-4018-97e2-7c7b0b99df0d\") " Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673464 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94049a0c-7da4-43be-8e15-36e9a282f728-operator-scripts\") pod \"94049a0c-7da4-43be-8e15-36e9a282f728\" (UID: \"94049a0c-7da4-43be-8e15-36e9a282f728\") " Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673616 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/02be9901-f882-45b3-8d1e-9105f2551417-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "02be9901-f882-45b3-8d1e-9105f2551417" (UID: "02be9901-f882-45b3-8d1e-9105f2551417"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673988 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/94049a0c-7da4-43be-8e15-36e9a282f728-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "94049a0c-7da4-43be-8e15-36e9a282f728" (UID: "94049a0c-7da4-43be-8e15-36e9a282f728"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.673989 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/02be9901-f882-45b3-8d1e-9105f2551417-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.674285 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/519b3e37-0f94-4018-97e2-7c7b0b99df0d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "519b3e37-0f94-4018-97e2-7c7b0b99df0d" (UID: "519b3e37-0f94-4018-97e2-7c7b0b99df0d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.676821 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02be9901-f882-45b3-8d1e-9105f2551417-kube-api-access-89k9g" (OuterVolumeSpecName: "kube-api-access-89k9g") pod "02be9901-f882-45b3-8d1e-9105f2551417" (UID: "02be9901-f882-45b3-8d1e-9105f2551417"). InnerVolumeSpecName "kube-api-access-89k9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.677483 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/519b3e37-0f94-4018-97e2-7c7b0b99df0d-kube-api-access-pc498" (OuterVolumeSpecName: "kube-api-access-pc498") pod "519b3e37-0f94-4018-97e2-7c7b0b99df0d" (UID: "519b3e37-0f94-4018-97e2-7c7b0b99df0d"). InnerVolumeSpecName "kube-api-access-pc498". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.679326 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/94049a0c-7da4-43be-8e15-36e9a282f728-kube-api-access-hcfnv" (OuterVolumeSpecName: "kube-api-access-hcfnv") pod "94049a0c-7da4-43be-8e15-36e9a282f728" (UID: "94049a0c-7da4-43be-8e15-36e9a282f728"). InnerVolumeSpecName "kube-api-access-hcfnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.775319 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-89k9g\" (UniqueName: \"kubernetes.io/projected/02be9901-f882-45b3-8d1e-9105f2551417-kube-api-access-89k9g\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.775344 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pc498\" (UniqueName: \"kubernetes.io/projected/519b3e37-0f94-4018-97e2-7c7b0b99df0d-kube-api-access-pc498\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.775353 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcfnv\" (UniqueName: \"kubernetes.io/projected/94049a0c-7da4-43be-8e15-36e9a282f728-kube-api-access-hcfnv\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.775364 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/519b3e37-0f94-4018-97e2-7c7b0b99df0d-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.775372 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/94049a0c-7da4-43be-8e15-36e9a282f728-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.932491 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-hw7cv" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.932500 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-hw7cv" event={"ID":"94049a0c-7da4-43be-8e15-36e9a282f728","Type":"ContainerDied","Data":"30bcc04bc9c688e6eaa2547767051ddeabdaf8c1a5457260e0b3a1a1f1844ff9"} Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.932610 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="30bcc04bc9c688e6eaa2547767051ddeabdaf8c1a5457260e0b3a1a1f1844ff9" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.934939 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-fhxfq" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.934948 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-fhxfq" event={"ID":"0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa","Type":"ContainerDied","Data":"86866559d1e409a67b579109ade0f58fefc025cc7add3b3bddd1c38b14a916b3"} Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.934973 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="86866559d1e409a67b579109ade0f58fefc025cc7add3b3bddd1c38b14a916b3" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.941136 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-9a17-account-create-update-jcbrv" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.941176 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-9a17-account-create-update-jcbrv" event={"ID":"519b3e37-0f94-4018-97e2-7c7b0b99df0d","Type":"ContainerDied","Data":"4c6277f89e6f41f356c2025cefc86e56a48d15d324a68cdc8807417b36909c58"} Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.941219 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4c6277f89e6f41f356c2025cefc86e56a48d15d324a68cdc8807417b36909c58" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.942818 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-78ee-account-create-update-88b9m" event={"ID":"02be9901-f882-45b3-8d1e-9105f2551417","Type":"ContainerDied","Data":"cb261ba017e345e56d4848d917288a496350a5f0da359381839b972dc24e0318"} Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.942857 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb261ba017e345e56d4848d917288a496350a5f0da359381839b972dc24e0318" Mar 20 13:42:48 crc kubenswrapper[4690]: I0320 13:42:48.943012 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-78ee-account-create-update-88b9m" Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.887577 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.893252 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.977525 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-be37-account-create-update-2w6zx" event={"ID":"b4fcecc7-f191-472f-abcc-d886648e5ecc","Type":"ContainerDied","Data":"eab02a84f314b5bc22ccefea91ffc5810adb97a86b163b6da4437771360783ed"} Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.977999 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eab02a84f314b5bc22ccefea91ffc5810adb97a86b163b6da4437771360783ed" Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.977591 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-be37-account-create-update-2w6zx" Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.978980 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-gblwq" event={"ID":"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9","Type":"ContainerDied","Data":"b04a172630557e5a6d659bf334c8e932f7b2d51f4781e275b78caca0842895b8"} Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.979064 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b04a172630557e5a6d659bf334c8e932f7b2d51f4781e275b78caca0842895b8" Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.979163 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-gblwq" Mar 20 13:42:51 crc kubenswrapper[4690]: I0320 13:42:51.980481 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-r8lpw" event={"ID":"75808517-3db4-41a1-ac99-99324152c26d","Type":"ContainerStarted","Data":"7284d0860af431e8fdf37a986f140380d9d4dfdc40c07b70a50f32946980e80a"} Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.002408 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-r8lpw" podStartSLOduration=2.188164072 podStartE2EDuration="7.002390691s" podCreationTimestamp="2026-03-20 13:42:45 +0000 UTC" firstStartedPulling="2026-03-20 13:42:46.877661502 +0000 UTC m=+1213.167261435" lastFinishedPulling="2026-03-20 13:42:51.691888111 +0000 UTC m=+1217.981488054" observedRunningTime="2026-03-20 13:42:51.997508151 +0000 UTC m=+1218.287108094" watchObservedRunningTime="2026-03-20 13:42:52.002390691 +0000 UTC m=+1218.291990634" Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.030510 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4fcecc7-f191-472f-abcc-d886648e5ecc-operator-scripts\") pod \"b4fcecc7-f191-472f-abcc-d886648e5ecc\" (UID: \"b4fcecc7-f191-472f-abcc-d886648e5ecc\") " Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.030637 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-operator-scripts\") pod \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\" (UID: \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\") " Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.030741 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d455\" (UniqueName: \"kubernetes.io/projected/b4fcecc7-f191-472f-abcc-d886648e5ecc-kube-api-access-2d455\") pod \"b4fcecc7-f191-472f-abcc-d886648e5ecc\" (UID: \"b4fcecc7-f191-472f-abcc-d886648e5ecc\") " Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.030775 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2wf8\" (UniqueName: \"kubernetes.io/projected/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-kube-api-access-d2wf8\") pod \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\" (UID: \"49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9\") " Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.031417 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b4fcecc7-f191-472f-abcc-d886648e5ecc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b4fcecc7-f191-472f-abcc-d886648e5ecc" (UID: "b4fcecc7-f191-472f-abcc-d886648e5ecc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.031631 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9" (UID: "49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.037623 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-kube-api-access-d2wf8" (OuterVolumeSpecName: "kube-api-access-d2wf8") pod "49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9" (UID: "49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9"). InnerVolumeSpecName "kube-api-access-d2wf8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.051112 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4fcecc7-f191-472f-abcc-d886648e5ecc-kube-api-access-2d455" (OuterVolumeSpecName: "kube-api-access-2d455") pod "b4fcecc7-f191-472f-abcc-d886648e5ecc" (UID: "b4fcecc7-f191-472f-abcc-d886648e5ecc"). InnerVolumeSpecName "kube-api-access-2d455". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.132227 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b4fcecc7-f191-472f-abcc-d886648e5ecc-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.132259 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.132269 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d455\" (UniqueName: \"kubernetes.io/projected/b4fcecc7-f191-472f-abcc-d886648e5ecc-kube-api-access-2d455\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:52 crc kubenswrapper[4690]: I0320 13:42:52.132281 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2wf8\" (UniqueName: \"kubernetes.io/projected/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9-kube-api-access-d2wf8\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:55 crc kubenswrapper[4690]: I0320 13:42:55.023001 4690 generic.go:334] "Generic (PLEG): container finished" podID="75808517-3db4-41a1-ac99-99324152c26d" containerID="7284d0860af431e8fdf37a986f140380d9d4dfdc40c07b70a50f32946980e80a" exitCode=0 Mar 20 13:42:55 crc kubenswrapper[4690]: I0320 13:42:55.023146 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-r8lpw" event={"ID":"75808517-3db4-41a1-ac99-99324152c26d","Type":"ContainerDied","Data":"7284d0860af431e8fdf37a986f140380d9d4dfdc40c07b70a50f32946980e80a"} Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.483193 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.508551 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-combined-ca-bundle\") pod \"75808517-3db4-41a1-ac99-99324152c26d\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.508828 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-config-data\") pod \"75808517-3db4-41a1-ac99-99324152c26d\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.509009 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ndmd\" (UniqueName: \"kubernetes.io/projected/75808517-3db4-41a1-ac99-99324152c26d-kube-api-access-6ndmd\") pod \"75808517-3db4-41a1-ac99-99324152c26d\" (UID: \"75808517-3db4-41a1-ac99-99324152c26d\") " Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.514497 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75808517-3db4-41a1-ac99-99324152c26d-kube-api-access-6ndmd" (OuterVolumeSpecName: "kube-api-access-6ndmd") pod "75808517-3db4-41a1-ac99-99324152c26d" (UID: "75808517-3db4-41a1-ac99-99324152c26d"). InnerVolumeSpecName "kube-api-access-6ndmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.531266 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "75808517-3db4-41a1-ac99-99324152c26d" (UID: "75808517-3db4-41a1-ac99-99324152c26d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.551209 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-config-data" (OuterVolumeSpecName: "config-data") pod "75808517-3db4-41a1-ac99-99324152c26d" (UID: "75808517-3db4-41a1-ac99-99324152c26d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.612270 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ndmd\" (UniqueName: \"kubernetes.io/projected/75808517-3db4-41a1-ac99-99324152c26d-kube-api-access-6ndmd\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.612311 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:56 crc kubenswrapper[4690]: I0320 13:42:56.612322 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75808517-3db4-41a1-ac99-99324152c26d-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.048271 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-r8lpw" event={"ID":"75808517-3db4-41a1-ac99-99324152c26d","Type":"ContainerDied","Data":"3379e20c366fa09bd271e8531331b7763289e6408bc47bb17dd45e29ba25d3f8"} Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.048322 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3379e20c366fa09bd271e8531331b7763289e6408bc47bb17dd45e29ba25d3f8" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.048342 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-r8lpw" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366107 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-72dlh"] Mar 20 13:42:57 crc kubenswrapper[4690]: E0320 13:42:57.366444 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="519b3e37-0f94-4018-97e2-7c7b0b99df0d" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366465 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="519b3e37-0f94-4018-97e2-7c7b0b99df0d" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: E0320 13:42:57.366477 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366485 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: E0320 13:42:57.366499 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366506 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: E0320 13:42:57.366521 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4fcecc7-f191-472f-abcc-d886648e5ecc" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366528 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4fcecc7-f191-472f-abcc-d886648e5ecc" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: E0320 13:42:57.366540 4690 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="94049a0c-7da4-43be-8e15-36e9a282f728" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366547 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="94049a0c-7da4-43be-8e15-36e9a282f728" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: E0320 13:42:57.366567 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75808517-3db4-41a1-ac99-99324152c26d" containerName="keystone-db-sync" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366575 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="75808517-3db4-41a1-ac99-99324152c26d" containerName="keystone-db-sync" Mar 20 13:42:57 crc kubenswrapper[4690]: E0320 13:42:57.366590 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02be9901-f882-45b3-8d1e-9105f2551417" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366597 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="02be9901-f882-45b3-8d1e-9105f2551417" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366728 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366743 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="94049a0c-7da4-43be-8e15-36e9a282f728" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366752 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="02be9901-f882-45b3-8d1e-9105f2551417" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366763 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="519b3e37-0f94-4018-97e2-7c7b0b99df0d" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366772 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="75808517-3db4-41a1-ac99-99324152c26d" containerName="keystone-db-sync" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366785 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa" containerName="mariadb-database-create" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.366795 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4fcecc7-f191-472f-abcc-d886648e5ecc" containerName="mariadb-account-create-update" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.367319 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.369538 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.369973 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.370230 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.370365 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.370576 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cwsn8" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.389258 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-72dlh"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.402780 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-n2xwf"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.405055 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.430828 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.430908 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.430941 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7srf\" (UniqueName: \"kubernetes.io/projected/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-kube-api-access-t7srf\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.430989 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-fernet-keys\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.431018 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-config-data\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.431038 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-config\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.431073 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-combined-ca-bundle\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.431098 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tzvj7\" (UniqueName: \"kubernetes.io/projected/b56f6171-754a-4f35-b04f-dffaebbff719-kube-api-access-tzvj7\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.431134 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-scripts\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.431190 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.431213 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-credential-keys\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.431247 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.446704 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-n2xwf"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.524759 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-589576fb47-w84wc"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.526094 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.534162 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.535825 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-combined-ca-bundle\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543150 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tzvj7\" (UniqueName: \"kubernetes.io/projected/b56f6171-754a-4f35-b04f-dffaebbff719-kube-api-access-tzvj7\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543216 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-scripts\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543349 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543391 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-credential-keys\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543441 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543470 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543490 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543515 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7srf\" (UniqueName: 
\"kubernetes.io/projected/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-kube-api-access-t7srf\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543557 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-config-data\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.535066 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.543570 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-fernet-keys\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.616402 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-config\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.535359 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.578114 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-svc\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.578125 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-nb\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.586542 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-sb\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.535514 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-t7knk" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.577664 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-combined-ca-bundle\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.619248 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-scripts\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.578076 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-swift-storage-0\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.628800 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-config\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.638214 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-fernet-keys\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.647384 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-config-data\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.650181 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7srf\" (UniqueName: \"kubernetes.io/projected/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-kube-api-access-t7srf\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.658969 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-8h88w"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.720136 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.725706 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tzvj7\" (UniqueName: \"kubernetes.io/projected/b56f6171-754a-4f35-b04f-dffaebbff719-kube-api-access-tzvj7\") pod \"dnsmasq-dns-bbf5cc879-n2xwf\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.725913 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-credential-keys\") pod \"keystone-bootstrap-72dlh\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.728406 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-scripts\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.728440 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kb26\" (UniqueName: \"kubernetes.io/projected/7826df84-54dc-49a4-9942-797331b72c57-kube-api-access-2kb26\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.728460 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7826df84-54dc-49a4-9942-797331b72c57-logs\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.728477 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7826df84-54dc-49a4-9942-797331b72c57-horizon-secret-key\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.728509 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-config-data\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.729562 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.734272 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-k2q5j" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.734521 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.735793 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-589576fb47-w84wc"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.739239 4690 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.776694 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-6rgrr"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.777892 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.784647 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-4pfnp" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.784922 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.785078 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.831700 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-scripts\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.831768 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdcnd\" (UniqueName: \"kubernetes.io/projected/3def27d2-bdda-4c07-b4b2-f695994bd509-kube-api-access-mdcnd\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.831810 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-db-sync-config-data\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.831884 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-scripts\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.831902 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kb26\" (UniqueName: \"kubernetes.io/projected/7826df84-54dc-49a4-9942-797331b72c57-kube-api-access-2kb26\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.831920 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drdb4\" (UniqueName: \"kubernetes.io/projected/badac960-83c0-4715-b125-0fdd44ae7315-kube-api-access-drdb4\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.831950 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7826df84-54dc-49a4-9942-797331b72c57-logs\") pod \"horizon-589576fb47-w84wc\" (UID: 
\"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.831967 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7826df84-54dc-49a4-9942-797331b72c57-horizon-secret-key\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.832069 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-config-data\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.832123 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-combined-ca-bundle\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.832161 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-combined-ca-bundle\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.832181 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-config-data\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.832263 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-config\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.832285 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3def27d2-bdda-4c07-b4b2-f695994bd509-etc-machine-id\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.833289 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-scripts\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.833598 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-config-data\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc 
kubenswrapper[4690]: I0320 13:42:57.845099 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7826df84-54dc-49a4-9942-797331b72c57-horizon-secret-key\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.845456 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7826df84-54dc-49a4-9942-797331b72c57-logs\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.856749 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-8h88w"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.869893 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kb26\" (UniqueName: \"kubernetes.io/projected/7826df84-54dc-49a4-9942-797331b72c57-kube-api-access-2kb26\") pod \"horizon-589576fb47-w84wc\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.877923 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-6rgrr"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.908311 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.910295 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.914447 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.914618 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.927230 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936558 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-config-data\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936616 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-run-httpd\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936680 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-config\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936702 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3def27d2-bdda-4c07-b4b2-f695994bd509-etc-machine-id\") pod 
\"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936725 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bk5j5\" (UniqueName: \"kubernetes.io/projected/93013757-d360-41e7-92a9-211155703015-kube-api-access-bk5j5\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936769 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-log-httpd\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936793 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-scripts\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936868 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdcnd\" (UniqueName: \"kubernetes.io/projected/3def27d2-bdda-4c07-b4b2-f695994bd509-kube-api-access-mdcnd\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936904 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-db-sync-config-data\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.936998 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.937391 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drdb4\" (UniqueName: \"kubernetes.io/projected/badac960-83c0-4715-b125-0fdd44ae7315-kube-api-access-drdb4\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.937433 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-scripts\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.937504 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-combined-ca-bundle\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 
13:42:57.937550 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.937571 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-combined-ca-bundle\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.937590 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-config-data\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.943863 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-config\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.943935 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3def27d2-bdda-4c07-b4b2-f695994bd509-etc-machine-id\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.945192 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-db-sync-config-data\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.950423 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-scripts\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.954587 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-n2xwf"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.957885 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-combined-ca-bundle\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.963468 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-combined-ca-bundle\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.964421 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-config-data\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.975589 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-vgnp6"] Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.977392 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.983130 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-zgphg" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.983304 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.984117 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:42:57 crc kubenswrapper[4690]: I0320 13:42:57.998948 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drdb4\" (UniqueName: \"kubernetes.io/projected/badac960-83c0-4715-b125-0fdd44ae7315-kube-api-access-drdb4\") pod \"neutron-db-sync-8h88w\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.000194 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-vgnp6"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.002553 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdcnd\" (UniqueName: \"kubernetes.io/projected/3def27d2-bdda-4c07-b4b2-f695994bd509-kube-api-access-mdcnd\") pod \"cinder-db-sync-6rgrr\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.010178 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-bsz48"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.011327 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.014308 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-cm226" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.014497 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.014612 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.028758 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.041300 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-tvmbw"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.043701 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.049943 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-db-sync-config-data\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.050044 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-config-data\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.050274 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.050546 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-combined-ca-bundle\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.050695 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-scripts\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.050832 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.050926 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-config-data\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.050979 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-logs\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.051014 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-scripts\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.051054 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-run-httpd\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.051091 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-combined-ca-bundle\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.051129 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mp9kl\" (UniqueName: \"kubernetes.io/projected/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-kube-api-access-mp9kl\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.051397 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bk5j5\" (UniqueName: \"kubernetes.io/projected/93013757-d360-41e7-92a9-211155703015-kube-api-access-bk5j5\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.051455 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-log-httpd\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.051524 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h76pl\" (UniqueName: \"kubernetes.io/projected/6629e615-4e98-4e99-b7dc-6990b379d93c-kube-api-access-h76pl\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.051665 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-run-httpd\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.052400 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-log-httpd\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.056702 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-config-data\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.061603 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " 
pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.062131 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8h88w" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.063332 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bsz48"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.065666 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.065886 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-scripts\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.079380 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bk5j5\" (UniqueName: \"kubernetes.io/projected/93013757-d360-41e7-92a9-211155703015-kube-api-access-bk5j5\") pod \"ceilometer-0\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.105277 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.115932 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-tvmbw"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.128816 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-5b696cf447-hntld"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.130554 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.142902 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.144195 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.148040 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-bch99" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.148406 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.148500 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154013 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gxxr\" (UniqueName: \"kubernetes.io/projected/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-kube-api-access-9gxxr\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154071 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-combined-ca-bundle\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154105 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154129 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154149 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc7da65-b3f8-4246-abfe-573fa358fa2d-logs\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154169 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154198 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78q6f\" (UniqueName: \"kubernetes.io/projected/afc7da65-b3f8-4246-abfe-573fa358fa2d-kube-api-access-78q6f\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154237 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-config\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154257 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-logs\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154273 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-scripts\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154293 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-combined-ca-bundle\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154310 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-config-data\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154338 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154358 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mp9kl\" (UniqueName: \"kubernetes.io/projected/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-kube-api-access-mp9kl\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154391 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h76pl\" (UniqueName: \"kubernetes.io/projected/6629e615-4e98-4e99-b7dc-6990b379d93c-kube-api-access-h76pl\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154420 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-scripts\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154441 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-config-data\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154456 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-db-sync-config-data\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.154481 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/afc7da65-b3f8-4246-abfe-573fa358fa2d-horizon-secret-key\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.159014 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-scripts\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.159052 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.159490 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-logs\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.159721 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b696cf447-hntld"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.161033 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-combined-ca-bundle\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.168431 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-db-sync-config-data\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.185017 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.185980 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h76pl\" (UniqueName: \"kubernetes.io/projected/6629e615-4e98-4e99-b7dc-6990b379d93c-kube-api-access-h76pl\") pod \"barbican-db-sync-vgnp6\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.190779 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mp9kl\" (UniqueName: 
\"kubernetes.io/projected/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-kube-api-access-mp9kl\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.193106 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-combined-ca-bundle\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.194857 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-config-data\") pod \"placement-db-sync-bsz48\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255142 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-scripts\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255181 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-logs\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255203 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255224 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255242 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/afc7da65-b3f8-4246-abfe-573fa358fa2d-horizon-secret-key\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255262 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztrpp\" (UniqueName: \"kubernetes.io/projected/b1a8a9e9-95bf-4dca-9080-6b8a73296057-kube-api-access-ztrpp\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255314 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255340 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gxxr\" (UniqueName: \"kubernetes.io/projected/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-kube-api-access-9gxxr\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255365 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255382 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255407 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc7da65-b3f8-4246-abfe-573fa358fa2d-logs\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255427 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255448 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78q6f\" (UniqueName: \"kubernetes.io/projected/afc7da65-b3f8-4246-abfe-573fa358fa2d-kube-api-access-78q6f\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255474 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-config\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255491 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255507 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255526 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-config-data\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255541 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.255574 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.256269 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-scripts\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.256890 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-sb\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.257622 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-nb\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.261493 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-svc\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.261779 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc7da65-b3f8-4246-abfe-573fa358fa2d-logs\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.262639 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-config-data\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " 
pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.263692 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-swift-storage-0\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.264149 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-config\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.272555 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.276459 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/afc7da65-b3f8-4246-abfe-573fa358fa2d-horizon-secret-key\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.283922 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gxxr\" (UniqueName: \"kubernetes.io/projected/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-kube-api-access-9gxxr\") pod \"dnsmasq-dns-56df8fb6b7-tvmbw\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.289603 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78q6f\" (UniqueName: \"kubernetes.io/projected/afc7da65-b3f8-4246-abfe-573fa358fa2d-kube-api-access-78q6f\") pod \"horizon-5b696cf447-hntld\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.303922 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.339635 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-bsz48" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.349960 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-n2xwf"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.364388 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.364438 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.364490 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.364526 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-logs\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.364563 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.364585 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.364607 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztrpp\" (UniqueName: \"kubernetes.io/projected/b1a8a9e9-95bf-4dca-9080-6b8a73296057-kube-api-access-ztrpp\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.364649 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.365101 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod 
\"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.373626 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.374090 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.374313 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-logs\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.382369 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.384456 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.391630 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.440155 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.447451 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.501632 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.543841 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztrpp\" (UniqueName: \"kubernetes.io/projected/b1a8a9e9-95bf-4dca-9080-6b8a73296057-kube-api-access-ztrpp\") pod \"glance-default-external-api-0\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.560576 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.669228 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-589576fb47-w84wc"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.731788 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.734188 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.736911 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.739260 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Mar 20 13:42:58 crc kubenswrapper[4690]: W0320 13:42:58.739838 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddd44759e_3f7b_4b74_9ddc_94bc2dfc3dfa.slice/crio-18f2d07c11b93f21aaddfc59afdfb8deb7a74168b2fc5032d5682135141464db WatchSource:0}: Error finding container 18f2d07c11b93f21aaddfc59afdfb8deb7a74168b2fc5032d5682135141464db: Status 404 returned error can't find the container with id 18f2d07c11b93f21aaddfc59afdfb8deb7a74168b2fc5032d5682135141464db Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.762576 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.800151 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-72dlh"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.821335 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.821382 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.821422 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-config-data\") pod \"glance-default-internal-api-0\" (UID: 
\"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.821441 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.821455 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-logs\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.821521 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.821540 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fwq8q\" (UniqueName: \"kubernetes.io/projected/0f5b6beb-18db-41c1-9434-96a715d3de80-kube-api-access-fwq8q\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.821567 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.924042 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.924385 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.924430 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.924445 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-logs\") pod \"glance-default-internal-api-0\" (UID: 
\"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.924464 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.924532 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.924555 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fwq8q\" (UniqueName: \"kubernetes.io/projected/0f5b6beb-18db-41c1-9434-96a715d3de80-kube-api-access-fwq8q\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.924581 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.926195 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-logs\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.929416 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.934026 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.948510 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.948533 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-scripts\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: 
I0320 13:42:58.951355 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-config-data\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.956017 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.963766 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-8h88w"] Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.971770 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fwq8q\" (UniqueName: \"kubernetes.io/projected/0f5b6beb-18db-41c1-9434-96a715d3de80-kube-api-access-fwq8q\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:58 crc kubenswrapper[4690]: I0320 13:42:58.989418 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-6rgrr"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.032946 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.088889 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-589576fb47-w84wc" event={"ID":"7826df84-54dc-49a4-9942-797331b72c57","Type":"ContainerStarted","Data":"b7a6b8a23c92215aab3297e3b36133e87b429cf753d8da496e7a6fe288bf7434"} Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.093213 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-72dlh" event={"ID":"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa","Type":"ContainerStarted","Data":"18f2d07c11b93f21aaddfc59afdfb8deb7a74168b2fc5032d5682135141464db"} Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.097276 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-6rgrr" event={"ID":"3def27d2-bdda-4c07-b4b2-f695994bd509","Type":"ContainerStarted","Data":"48aaaa77378b8c665204b752ab8dae8e229a5b47d26cbc496c4d6280d5f8650a"} Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.099428 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" event={"ID":"b56f6171-754a-4f35-b04f-dffaebbff719","Type":"ContainerStarted","Data":"6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d"} Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.099461 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" event={"ID":"b56f6171-754a-4f35-b04f-dffaebbff719","Type":"ContainerStarted","Data":"33874b9f143433834716cc5e9c6dbad7acf0bd08741b681019da16226db6a488"} Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.099571 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" 
podUID="b56f6171-754a-4f35-b04f-dffaebbff719" containerName="init" containerID="cri-o://6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d" gracePeriod=10 Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.112891 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8h88w" event={"ID":"badac960-83c0-4715-b125-0fdd44ae7315","Type":"ContainerStarted","Data":"3c7bb7f5ff6cdc8da2e14b01d1d80c747fafba1f33497349ccc7de5465f7119d"} Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.117495 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-72dlh" podStartSLOduration=2.117473505 podStartE2EDuration="2.117473505s" podCreationTimestamp="2026-03-20 13:42:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:42:59.111900305 +0000 UTC m=+1225.401500238" watchObservedRunningTime="2026-03-20 13:42:59.117473505 +0000 UTC m=+1225.407073448" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.171006 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.317903 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-bsz48"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.324509 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-vgnp6"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.344883 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.428404 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.460351 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-589576fb47-w84wc"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.536999 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7969d5769c-bbm4p"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.545602 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.569432 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7969d5769c-bbm4p"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.601488 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.655691 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.658742 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv2nk\" (UniqueName: \"kubernetes.io/projected/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-kube-api-access-kv2nk\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.658925 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-config-data\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.658996 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-logs\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.659040 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-horizon-secret-key\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.659090 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-scripts\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: W0320 13:42:59.745279 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1a8a9e9_95bf_4dca_9080_6b8a73296057.slice/crio-e345df0e1d6115ed65c3b67fc645370fd24ae246525d927acfb750fe95b052fb WatchSource:0}: Error finding container e345df0e1d6115ed65c3b67fc645370fd24ae246525d927acfb750fe95b052fb: Status 404 returned error can't find the container with id e345df0e1d6115ed65c3b67fc645370fd24ae246525d927acfb750fe95b052fb Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.753939 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-5b696cf447-hntld"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.761542 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.762765 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-logs\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.762816 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-horizon-secret-key\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.762882 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-scripts\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.762907 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv2nk\" (UniqueName: \"kubernetes.io/projected/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-kube-api-access-kv2nk\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.762950 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-config-data\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.764328 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-config-data\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.764757 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-scripts\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.766741 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-logs\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.781256 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-horizon-secret-key\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.788181 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-tvmbw"] Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.820511 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kv2nk\" (UniqueName: 
\"kubernetes.io/projected/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-kube-api-access-kv2nk\") pod \"horizon-7969d5769c-bbm4p\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.865215 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.874171 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.964965 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-config\") pod \"b56f6171-754a-4f35-b04f-dffaebbff719\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.965019 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-swift-storage-0\") pod \"b56f6171-754a-4f35-b04f-dffaebbff719\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.965077 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-svc\") pod \"b56f6171-754a-4f35-b04f-dffaebbff719\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.965130 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-nb\") pod \"b56f6171-754a-4f35-b04f-dffaebbff719\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.965192 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tzvj7\" (UniqueName: \"kubernetes.io/projected/b56f6171-754a-4f35-b04f-dffaebbff719-kube-api-access-tzvj7\") pod \"b56f6171-754a-4f35-b04f-dffaebbff719\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.965548 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-sb\") pod \"b56f6171-754a-4f35-b04f-dffaebbff719\" (UID: \"b56f6171-754a-4f35-b04f-dffaebbff719\") " Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.971121 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b56f6171-754a-4f35-b04f-dffaebbff719-kube-api-access-tzvj7" (OuterVolumeSpecName: "kube-api-access-tzvj7") pod "b56f6171-754a-4f35-b04f-dffaebbff719" (UID: "b56f6171-754a-4f35-b04f-dffaebbff719"). InnerVolumeSpecName "kube-api-access-tzvj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:42:59 crc kubenswrapper[4690]: I0320 13:42:59.990355 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b56f6171-754a-4f35-b04f-dffaebbff719" (UID: "b56f6171-754a-4f35-b04f-dffaebbff719"). 
InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.002140 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b56f6171-754a-4f35-b04f-dffaebbff719" (UID: "b56f6171-754a-4f35-b04f-dffaebbff719"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.006036 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.014254 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b56f6171-754a-4f35-b04f-dffaebbff719" (UID: "b56f6171-754a-4f35-b04f-dffaebbff719"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.016736 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b56f6171-754a-4f35-b04f-dffaebbff719" (UID: "b56f6171-754a-4f35-b04f-dffaebbff719"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.039053 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-config" (OuterVolumeSpecName: "config") pod "b56f6171-754a-4f35-b04f-dffaebbff719" (UID: "b56f6171-754a-4f35-b04f-dffaebbff719"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.089371 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.089501 4690 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.089540 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.089570 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.089583 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tzvj7\" (UniqueName: \"kubernetes.io/projected/b56f6171-754a-4f35-b04f-dffaebbff719-kube-api-access-tzvj7\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.089592 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b56f6171-754a-4f35-b04f-dffaebbff719-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.143035 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-72dlh" event={"ID":"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa","Type":"ContainerStarted","Data":"e64f2bfa701432df86f7551d860275c933b7969c346bc788dacec4f122f93905"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.152936 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bsz48" event={"ID":"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9","Type":"ContainerStarted","Data":"ee8f55ed9458a8845e372ebedb8d7937bf996410ea0089a7730d80120f99488f"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.156312 4690 generic.go:334] "Generic (PLEG): container finished" podID="b56f6171-754a-4f35-b04f-dffaebbff719" containerID="6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d" exitCode=0 Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.156361 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" event={"ID":"b56f6171-754a-4f35-b04f-dffaebbff719","Type":"ContainerDied","Data":"6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.156381 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" event={"ID":"b56f6171-754a-4f35-b04f-dffaebbff719","Type":"ContainerDied","Data":"33874b9f143433834716cc5e9c6dbad7acf0bd08741b681019da16226db6a488"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.156403 4690 scope.go:117] "RemoveContainer" containerID="6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.157423 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bbf5cc879-n2xwf" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.169559 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vgnp6" event={"ID":"6629e615-4e98-4e99-b7dc-6990b379d93c","Type":"ContainerStarted","Data":"2c3bab4dbfca96331ae9696e484d8bd83307e59d0d795d4b5256caa0134061fc"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.187505 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8h88w" event={"ID":"badac960-83c0-4715-b125-0fdd44ae7315","Type":"ContainerStarted","Data":"6eb961c8d2c437e3c78c36daeeee88a831078f66dc0a9d85833a5bc4727e0274"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.201605 4690 scope.go:117] "RemoveContainer" containerID="6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d" Mar 20 13:43:00 crc kubenswrapper[4690]: E0320 13:43:00.205977 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d\": container with ID starting with 6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d not found: ID does not exist" containerID="6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.206036 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d"} err="failed to get container status \"6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d\": rpc error: code = NotFound desc = could not find container \"6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d\": container with ID starting with 6363b93d7866827c625661abfcbaf8015d45712f2275e29cc3e127ab1971122d not found: ID does not exist" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.211211 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"93013757-d360-41e7-92a9-211155703015","Type":"ContainerStarted","Data":"47e62e6e8ea51111fd27e6969e7993fb093220f9f056010d7ec78a1f6323d0a6"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.232622 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-n2xwf"] Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.233824 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a8a9e9-95bf-4dca-9080-6b8a73296057","Type":"ContainerStarted","Data":"e345df0e1d6115ed65c3b67fc645370fd24ae246525d927acfb750fe95b052fb"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.237305 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" event={"ID":"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9","Type":"ContainerStarted","Data":"f8f277a382d93fdab257b751d8c1a0b99beff26e373c9964b2eee2bffecfde85"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.239165 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f5b6beb-18db-41c1-9434-96a715d3de80","Type":"ContainerStarted","Data":"709aefb1c1a7f783e220304b74518a75d565ad8591db490d95f3d47d36e8c2ca"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.241047 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b696cf447-hntld" 
event={"ID":"afc7da65-b3f8-4246-abfe-573fa358fa2d","Type":"ContainerStarted","Data":"5068568503ec6c9eddd54b676a7bea8fb8b30d5df1bb5cae2e01c0425fcb42c8"} Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.277041 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bbf5cc879-n2xwf"] Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.294441 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-8h88w" podStartSLOduration=3.294421599 podStartE2EDuration="3.294421599s" podCreationTimestamp="2026-03-20 13:42:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:00.232579875 +0000 UTC m=+1226.522179828" watchObservedRunningTime="2026-03-20 13:43:00.294421599 +0000 UTC m=+1226.584021542" Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.382691 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7969d5769c-bbm4p"] Mar 20 13:43:00 crc kubenswrapper[4690]: I0320 13:43:00.427788 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b56f6171-754a-4f35-b04f-dffaebbff719" path="/var/lib/kubelet/pods/b56f6171-754a-4f35-b04f-dffaebbff719/volumes" Mar 20 13:43:01 crc kubenswrapper[4690]: I0320 13:43:01.258402 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7969d5769c-bbm4p" event={"ID":"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08","Type":"ContainerStarted","Data":"eaf515275b9baab121378b401e0e4035923ed6c6dd8a4c6b9155d133222d7b59"} Mar 20 13:43:01 crc kubenswrapper[4690]: I0320 13:43:01.261621 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a8a9e9-95bf-4dca-9080-6b8a73296057","Type":"ContainerStarted","Data":"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3"} Mar 20 13:43:01 crc kubenswrapper[4690]: I0320 13:43:01.269207 4690 generic.go:334] "Generic (PLEG): container finished" podID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" containerID="04d7117df6606306ba94e201715c2698965696f09fd36e752c65d4ddc2b22c24" exitCode=0 Mar 20 13:43:01 crc kubenswrapper[4690]: I0320 13:43:01.269322 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" event={"ID":"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9","Type":"ContainerDied","Data":"04d7117df6606306ba94e201715c2698965696f09fd36e752c65d4ddc2b22c24"} Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.295725 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a8a9e9-95bf-4dca-9080-6b8a73296057","Type":"ContainerStarted","Data":"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c"} Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.296762 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerName="glance-log" containerID="cri-o://0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3" gracePeriod=30 Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.297371 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerName="glance-httpd" containerID="cri-o://bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c" gracePeriod=30 Mar 20 13:43:02 crc 
kubenswrapper[4690]: I0320 13:43:02.329606 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" event={"ID":"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9","Type":"ContainerStarted","Data":"43c5808746fbf3e1dfadb875df49c35f9ef5900dda0c10297072f87eaf4a556f"} Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.329966 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.343383 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.343364075 podStartE2EDuration="5.343364075s" podCreationTimestamp="2026-03-20 13:42:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:02.318817781 +0000 UTC m=+1228.608417724" watchObservedRunningTime="2026-03-20 13:43:02.343364075 +0000 UTC m=+1228.632964028" Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.349333 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f5b6beb-18db-41c1-9434-96a715d3de80","Type":"ContainerStarted","Data":"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4"} Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.349381 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f5b6beb-18db-41c1-9434-96a715d3de80","Type":"ContainerStarted","Data":"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d"} Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.349506 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerName="glance-log" containerID="cri-o://8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d" gracePeriod=30 Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.350461 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerName="glance-httpd" containerID="cri-o://0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4" gracePeriod=30 Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.362036 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" podStartSLOduration=5.362015471 podStartE2EDuration="5.362015471s" podCreationTimestamp="2026-03-20 13:42:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:02.355106212 +0000 UTC m=+1228.644706155" watchObservedRunningTime="2026-03-20 13:43:02.362015471 +0000 UTC m=+1228.651615414" Mar 20 13:43:02 crc kubenswrapper[4690]: I0320 13:43:02.385958 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.38464941 podStartE2EDuration="5.38464941s" podCreationTimestamp="2026-03-20 13:42:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:02.37278345 +0000 UTC m=+1228.662383393" watchObservedRunningTime="2026-03-20 13:43:02.38464941 +0000 UTC m=+1228.674249343" Mar 20 13:43:02 crc 
kubenswrapper[4690]: E0320 13:43:02.510491 4690 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1a8a9e9_95bf_4dca_9080_6b8a73296057.slice/crio-conmon-0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1a8a9e9_95bf_4dca_9080_6b8a73296057.slice/crio-bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f5b6beb_18db_41c1_9434_96a715d3de80.slice/crio-0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0f5b6beb_18db_41c1_9434_96a715d3de80.slice/crio-8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb1a8a9e9_95bf_4dca_9080_6b8a73296057.slice/crio-conmon-bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c.scope\": RecentStats: unable to find data in memory cache]" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.001692 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.100531 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-public-tls-certs\") pod \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.100727 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-logs\") pod \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.100787 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-combined-ca-bundle\") pod \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.100823 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-config-data\") pod \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.100938 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-scripts\") pod \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.100988 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-httpd-run\") pod 
\"b1a8a9e9-95bf-4dca-9080-6b8a73296057\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.101020 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ztrpp\" (UniqueName: \"kubernetes.io/projected/b1a8a9e9-95bf-4dca-9080-6b8a73296057-kube-api-access-ztrpp\") pod \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.101128 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\" (UID: \"b1a8a9e9-95bf-4dca-9080-6b8a73296057\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.101746 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b1a8a9e9-95bf-4dca-9080-6b8a73296057" (UID: "b1a8a9e9-95bf-4dca-9080-6b8a73296057"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.101889 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-logs" (OuterVolumeSpecName: "logs") pod "b1a8a9e9-95bf-4dca-9080-6b8a73296057" (UID: "b1a8a9e9-95bf-4dca-9080-6b8a73296057"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.109094 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1a8a9e9-95bf-4dca-9080-6b8a73296057-kube-api-access-ztrpp" (OuterVolumeSpecName: "kube-api-access-ztrpp") pod "b1a8a9e9-95bf-4dca-9080-6b8a73296057" (UID: "b1a8a9e9-95bf-4dca-9080-6b8a73296057"). InnerVolumeSpecName "kube-api-access-ztrpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.109347 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "b1a8a9e9-95bf-4dca-9080-6b8a73296057" (UID: "b1a8a9e9-95bf-4dca-9080-6b8a73296057"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.111015 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-scripts" (OuterVolumeSpecName: "scripts") pod "b1a8a9e9-95bf-4dca-9080-6b8a73296057" (UID: "b1a8a9e9-95bf-4dca-9080-6b8a73296057"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.166908 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1a8a9e9-95bf-4dca-9080-6b8a73296057" (UID: "b1a8a9e9-95bf-4dca-9080-6b8a73296057"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.171280 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-config-data" (OuterVolumeSpecName: "config-data") pod "b1a8a9e9-95bf-4dca-9080-6b8a73296057" (UID: "b1a8a9e9-95bf-4dca-9080-6b8a73296057"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.186522 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.187238 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "b1a8a9e9-95bf-4dca-9080-6b8a73296057" (UID: "b1a8a9e9-95bf-4dca-9080-6b8a73296057"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.209331 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.209374 4690 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-httpd-run\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.209390 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ztrpp\" (UniqueName: \"kubernetes.io/projected/b1a8a9e9-95bf-4dca-9080-6b8a73296057-kube-api-access-ztrpp\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.209435 4690 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.209448 4690 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.209460 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a8a9e9-95bf-4dca-9080-6b8a73296057-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.209471 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.209482 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a8a9e9-95bf-4dca-9080-6b8a73296057-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.235236 4690 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.310694 4690 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-combined-ca-bundle\") pod \"0f5b6beb-18db-41c1-9434-96a715d3de80\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.310769 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"0f5b6beb-18db-41c1-9434-96a715d3de80\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.310796 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-internal-tls-certs\") pod \"0f5b6beb-18db-41c1-9434-96a715d3de80\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.310996 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fwq8q\" (UniqueName: \"kubernetes.io/projected/0f5b6beb-18db-41c1-9434-96a715d3de80-kube-api-access-fwq8q\") pod \"0f5b6beb-18db-41c1-9434-96a715d3de80\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.311080 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-scripts\") pod \"0f5b6beb-18db-41c1-9434-96a715d3de80\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.311131 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-config-data\") pod \"0f5b6beb-18db-41c1-9434-96a715d3de80\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.311156 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-httpd-run\") pod \"0f5b6beb-18db-41c1-9434-96a715d3de80\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.311190 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-logs\") pod \"0f5b6beb-18db-41c1-9434-96a715d3de80\" (UID: \"0f5b6beb-18db-41c1-9434-96a715d3de80\") " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.312150 4690 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.312483 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-logs" (OuterVolumeSpecName: "logs") pod "0f5b6beb-18db-41c1-9434-96a715d3de80" (UID: "0f5b6beb-18db-41c1-9434-96a715d3de80"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.315081 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f5b6beb-18db-41c1-9434-96a715d3de80-kube-api-access-fwq8q" (OuterVolumeSpecName: "kube-api-access-fwq8q") pod "0f5b6beb-18db-41c1-9434-96a715d3de80" (UID: "0f5b6beb-18db-41c1-9434-96a715d3de80"). InnerVolumeSpecName "kube-api-access-fwq8q". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.315837 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "0f5b6beb-18db-41c1-9434-96a715d3de80" (UID: "0f5b6beb-18db-41c1-9434-96a715d3de80"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.316006 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-scripts" (OuterVolumeSpecName: "scripts") pod "0f5b6beb-18db-41c1-9434-96a715d3de80" (UID: "0f5b6beb-18db-41c1-9434-96a715d3de80"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.317211 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "0f5b6beb-18db-41c1-9434-96a715d3de80" (UID: "0f5b6beb-18db-41c1-9434-96a715d3de80"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.350757 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0f5b6beb-18db-41c1-9434-96a715d3de80" (UID: "0f5b6beb-18db-41c1-9434-96a715d3de80"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.371729 4690 generic.go:334] "Generic (PLEG): container finished" podID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerID="bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c" exitCode=143 Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.371762 4690 generic.go:334] "Generic (PLEG): container finished" podID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerID="0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3" exitCode=143 Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.371807 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a8a9e9-95bf-4dca-9080-6b8a73296057","Type":"ContainerDied","Data":"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c"} Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.371834 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a8a9e9-95bf-4dca-9080-6b8a73296057","Type":"ContainerDied","Data":"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3"} Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.371858 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a8a9e9-95bf-4dca-9080-6b8a73296057","Type":"ContainerDied","Data":"e345df0e1d6115ed65c3b67fc645370fd24ae246525d927acfb750fe95b052fb"} Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.371874 4690 scope.go:117] "RemoveContainer" containerID="bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.371924 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.380976 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-config-data" (OuterVolumeSpecName: "config-data") pod "0f5b6beb-18db-41c1-9434-96a715d3de80" (UID: "0f5b6beb-18db-41c1-9434-96a715d3de80"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.390097 4690 generic.go:334] "Generic (PLEG): container finished" podID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerID="0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4" exitCode=143 Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.390129 4690 generic.go:334] "Generic (PLEG): container finished" podID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerID="8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d" exitCode=143 Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.390203 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.390259 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f5b6beb-18db-41c1-9434-96a715d3de80","Type":"ContainerDied","Data":"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4"} Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.390303 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f5b6beb-18db-41c1-9434-96a715d3de80","Type":"ContainerDied","Data":"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d"} Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.390316 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"0f5b6beb-18db-41c1-9434-96a715d3de80","Type":"ContainerDied","Data":"709aefb1c1a7f783e220304b74518a75d565ad8591db490d95f3d47d36e8c2ca"} Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.392584 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "0f5b6beb-18db-41c1-9434-96a715d3de80" (UID: "0f5b6beb-18db-41c1-9434-96a715d3de80"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.414408 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.414439 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.414450 4690 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-httpd-run\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.414459 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0f5b6beb-18db-41c1-9434-96a715d3de80-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.414467 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.414495 4690 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.414504 4690 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/0f5b6beb-18db-41c1-9434-96a715d3de80-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.414514 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fwq8q\" (UniqueName: \"kubernetes.io/projected/0f5b6beb-18db-41c1-9434-96a715d3de80-kube-api-access-fwq8q\") on node \"crc\" DevicePath \"\"" 
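The PLEG entries above (for example the ContainerStarted/ContainerDied events for openstack/glance-default-external-api-0 and openstack/glance-default-internal-api-0) share a single layout: a quoted message, a pod="namespace/name" field, and an event={...} payload logged as JSON. A minimal sketch of how such lines could be scanned for container lifecycle transitions follows; the regular expression and field names are assumptions inferred only from the formatting visible in this capture, not part of the captured log itself.

#!/usr/bin/env python3
# Illustrative sketch (not part of the captured log): pull PLEG container
# lifecycle events out of a kubelet log shaped like the capture above.
# The regex is an assumption based on the 'SyncLoop (PLEG): event for pod'
# lines visible here; adjust it if the log format differs.
import json
import re
import sys

PLEG_RE = re.compile(
    r'SyncLoop \(PLEG\): event for pod" pod="(?P<pod>[^"]+)" event=(?P<event>\{.*?\})'
)

def pleg_events(lines):
    """Yield (pod, event type, container/sandbox id) for each PLEG event line."""
    for line in lines:
        m = PLEG_RE.search(line)
        if not m:
            continue
        # The event payload is logged as JSON, e.g.
        # {"ID":"<pod uid>","Type":"ContainerDied","Data":"<container id>"}
        evt = json.loads(m.group("event"))
        yield m.group("pod"), evt.get("Type"), evt.get("Data")

if __name__ == "__main__":
    with open(sys.argv[1]) as log:
        for pod, etype, data in pleg_events(log):
            print(f"{pod}\t{etype}\t{data}")

Run against a file such as kubelet.log, this prints one line per PLEG event, which makes the glance pod delete/recreate sequence in this section (containers exiting with code 143, volumes detached, then the pods re-added with new UIDs) easier to follow than reading the raw entries.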
Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.442766 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.454754 4690 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.480969 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.489894 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:03 crc kubenswrapper[4690]: E0320 13:43:03.490449 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerName="glance-log" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490467 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerName="glance-log" Mar 20 13:43:03 crc kubenswrapper[4690]: E0320 13:43:03.490492 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b56f6171-754a-4f35-b04f-dffaebbff719" containerName="init" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490499 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b56f6171-754a-4f35-b04f-dffaebbff719" containerName="init" Mar 20 13:43:03 crc kubenswrapper[4690]: E0320 13:43:03.490514 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerName="glance-log" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490520 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerName="glance-log" Mar 20 13:43:03 crc kubenswrapper[4690]: E0320 13:43:03.490532 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerName="glance-httpd" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490538 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerName="glance-httpd" Mar 20 13:43:03 crc kubenswrapper[4690]: E0320 13:43:03.490547 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerName="glance-httpd" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490553 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerName="glance-httpd" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490731 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerName="glance-log" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490741 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerName="glance-log" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490748 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" containerName="glance-httpd" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490761 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="b56f6171-754a-4f35-b04f-dffaebbff719" containerName="init" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.490767 4690 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" containerName="glance-httpd" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.492345 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.493039 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.495824 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.495864 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.516403 4690 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.617524 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.617755 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.617803 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.617837 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.617881 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4qrzg\" (UniqueName: \"kubernetes.io/projected/c70c11e8-bf26-41f0-9ca7-d135428c216e-kube-api-access-4qrzg\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.617910 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.617948 4690 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-logs\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.618054 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.719449 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.719518 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.719561 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.719593 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.719625 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4qrzg\" (UniqueName: \"kubernetes.io/projected/c70c11e8-bf26-41f0-9ca7-d135428c216e-kube-api-access-4qrzg\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.719641 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.719681 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-logs\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.719699 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.723614 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.725345 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-logs\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.728088 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-scripts\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.728817 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.743305 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.743730 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.752983 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4qrzg\" (UniqueName: \"kubernetes.io/projected/c70c11e8-bf26-41f0-9ca7-d135428c216e-kube-api-access-4qrzg\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.753506 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.754579 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-config-data\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.756780 4690 kubelet.go:2431] "SyncLoop 
REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.771101 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.772627 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.778884 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.799983 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.800260 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.822335 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.822371 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-logs\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.822397 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.822418 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.822479 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svszk\" (UniqueName: \"kubernetes.io/projected/e43c11ca-5233-4250-b0cb-8b814c19f794-kube-api-access-svszk\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.822546 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.822572 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" 
(UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.822597 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.861826 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.924662 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.924718 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.924746 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-logs\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.924769 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.924789 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.924866 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svszk\" (UniqueName: \"kubernetes.io/projected/e43c11ca-5233-4250-b0cb-8b814c19f794-kube-api-access-svszk\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.924926 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.924948 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.925037 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.926247 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-logs\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.927322 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.930331 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-scripts\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.930623 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.933583 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.934489 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-config-data\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.945039 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svszk\" (UniqueName: \"kubernetes.io/projected/e43c11ca-5233-4250-b0cb-8b814c19f794-kube-api-access-svszk\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " 
pod="openstack/glance-default-internal-api-0" Mar 20 13:43:03 crc kubenswrapper[4690]: I0320 13:43:03.951240 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:04 crc kubenswrapper[4690]: I0320 13:43:04.129363 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:43:04 crc kubenswrapper[4690]: I0320 13:43:04.132360 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:04 crc kubenswrapper[4690]: I0320 13:43:04.413773 4690 generic.go:334] "Generic (PLEG): container finished" podID="dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" containerID="e64f2bfa701432df86f7551d860275c933b7969c346bc788dacec4f122f93905" exitCode=0 Mar 20 13:43:04 crc kubenswrapper[4690]: I0320 13:43:04.414089 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-72dlh" event={"ID":"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa","Type":"ContainerDied","Data":"e64f2bfa701432df86f7551d860275c933b7969c346bc788dacec4f122f93905"} Mar 20 13:43:04 crc kubenswrapper[4690]: I0320 13:43:04.465387 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0f5b6beb-18db-41c1-9434-96a715d3de80" path="/var/lib/kubelet/pods/0f5b6beb-18db-41c1-9434-96a715d3de80/volumes" Mar 20 13:43:04 crc kubenswrapper[4690]: I0320 13:43:04.466470 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1a8a9e9-95bf-4dca-9080-6b8a73296057" path="/var/lib/kubelet/pods/b1a8a9e9-95bf-4dca-9080-6b8a73296057/volumes" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.444067 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5b696cf447-hntld"] Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.481257 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-587c585984-xs7nl"] Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.482702 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.493587 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-587c585984-xs7nl"] Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.496666 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.503084 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-secret-key\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.503201 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-tls-certs\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.503277 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-scripts\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.503323 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-combined-ca-bundle\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.503379 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae74738f-0b10-4955-97fb-e892ca7102a0-logs\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.503478 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-config-data\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.503776 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4ltl6\" (UniqueName: \"kubernetes.io/projected/ae74738f-0b10-4955-97fb-e892ca7102a0-kube-api-access-4ltl6\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.565066 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.594773 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 
13:43:06.606057 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4ltl6\" (UniqueName: \"kubernetes.io/projected/ae74738f-0b10-4955-97fb-e892ca7102a0-kube-api-access-4ltl6\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.606320 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-secret-key\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.606466 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-tls-certs\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.606615 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-scripts\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.606719 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-combined-ca-bundle\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.606830 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae74738f-0b10-4955-97fb-e892ca7102a0-logs\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.606997 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-config-data\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.609164 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-scripts\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.609235 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-config-data\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.609495 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ae74738f-0b10-4955-97fb-e892ca7102a0-logs\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.611025 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7969d5769c-bbm4p"] Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.617034 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-combined-ca-bundle\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.617075 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-secret-key\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.628756 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-tls-certs\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.635070 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4ltl6\" (UniqueName: \"kubernetes.io/projected/ae74738f-0b10-4955-97fb-e892ca7102a0-kube-api-access-4ltl6\") pod \"horizon-587c585984-xs7nl\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.644814 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7946cd7f64-rm6mr"] Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.649893 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.680096 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7946cd7f64-rm6mr"] Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.708598 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-scripts\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.708644 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-config-data\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.708688 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-logs\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.708738 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-horizon-tls-certs\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.708764 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-horizon-secret-key\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.708786 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hvkxd\" (UniqueName: \"kubernetes.io/projected/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-kube-api-access-hvkxd\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.708808 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-combined-ca-bundle\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.810238 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-horizon-tls-certs\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.810693 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-horizon-secret-key\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.810736 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hvkxd\" (UniqueName: \"kubernetes.io/projected/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-kube-api-access-hvkxd\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.810771 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-combined-ca-bundle\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.810876 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-scripts\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.810920 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-config-data\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.811015 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-logs\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.811441 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-logs\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.812569 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-scripts\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.813434 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-horizon-tls-certs\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.813699 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.814064 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-config-data\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.814611 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-horizon-secret-key\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.827773 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-combined-ca-bundle\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:06 crc kubenswrapper[4690]: I0320 13:43:06.830375 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hvkxd\" (UniqueName: \"kubernetes.io/projected/ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596-kube-api-access-hvkxd\") pod \"horizon-7946cd7f64-rm6mr\" (UID: \"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596\") " pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:07 crc kubenswrapper[4690]: I0320 13:43:07.015367 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:08 crc kubenswrapper[4690]: I0320 13:43:08.386734 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:43:08 crc kubenswrapper[4690]: I0320 13:43:08.454022 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-fdmgt"] Mar 20 13:43:08 crc kubenswrapper[4690]: I0320 13:43:08.454337 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="dnsmasq-dns" containerID="cri-o://b609a331b6c8b2fcb43e5bb73f626c651e6f9c616a53f06abf6ef6b7cd424c9e" gracePeriod=10 Mar 20 13:43:08 crc kubenswrapper[4690]: I0320 13:43:08.502886 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Mar 20 13:43:09 crc kubenswrapper[4690]: I0320 13:43:09.487275 4690 generic.go:334] "Generic (PLEG): container finished" podID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerID="b609a331b6c8b2fcb43e5bb73f626c651e6f9c616a53f06abf6ef6b7cd424c9e" exitCode=0 Mar 20 13:43:09 crc kubenswrapper[4690]: I0320 13:43:09.487592 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" event={"ID":"9e320fee-cd78-4d19-b2ac-23dd935a0894","Type":"ContainerDied","Data":"b609a331b6c8b2fcb43e5bb73f626c651e6f9c616a53f06abf6ef6b7cd424c9e"} Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.261881 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.290098 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-scripts\") pod \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.290174 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-credential-keys\") pod \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.290280 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-config-data\") pod \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.290313 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t7srf\" (UniqueName: \"kubernetes.io/projected/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-kube-api-access-t7srf\") pod \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.290355 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-fernet-keys\") pod \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.290421 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-combined-ca-bundle\") pod \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\" (UID: \"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa\") " Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.302909 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-scripts" (OuterVolumeSpecName: "scripts") pod "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" (UID: "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.303266 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-kube-api-access-t7srf" (OuterVolumeSpecName: "kube-api-access-t7srf") pod "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" (UID: "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa"). InnerVolumeSpecName "kube-api-access-t7srf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.305954 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" (UID: "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.311209 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" (UID: "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.328276 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" (UID: "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.333135 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-config-data" (OuterVolumeSpecName: "config-data") pod "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" (UID: "dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.392943 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.392975 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t7srf\" (UniqueName: \"kubernetes.io/projected/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-kube-api-access-t7srf\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.392986 4690 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-fernet-keys\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.392996 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.393004 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.393013 4690 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa-credential-keys\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.518196 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-72dlh" event={"ID":"dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa","Type":"ContainerDied","Data":"18f2d07c11b93f21aaddfc59afdfb8deb7a74168b2fc5032d5682135141464db"} Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.518254 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18f2d07c11b93f21aaddfc59afdfb8deb7a74168b2fc5032d5682135141464db" Mar 20 13:43:10 crc kubenswrapper[4690]: I0320 13:43:10.518357 4690 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-72dlh" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.448904 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-72dlh"] Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.456812 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-72dlh"] Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.546696 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-42k9z"] Mar 20 13:43:11 crc kubenswrapper[4690]: E0320 13:43:11.547155 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" containerName="keystone-bootstrap" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.547172 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" containerName="keystone-bootstrap" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.547422 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" containerName="keystone-bootstrap" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.548146 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.550127 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.550218 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.550330 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.553353 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.553524 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cwsn8" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.563528 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-42k9z"] Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.633673 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv9nb\" (UniqueName: \"kubernetes.io/projected/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-kube-api-access-hv9nb\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.633810 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-config-data\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.633978 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-fernet-keys\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" 
Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.634042 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-credential-keys\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.634075 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-scripts\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.634202 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-combined-ca-bundle\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.735406 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-fernet-keys\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.735471 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-credential-keys\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.735495 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-scripts\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.735551 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-combined-ca-bundle\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.735588 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv9nb\" (UniqueName: \"kubernetes.io/projected/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-kube-api-access-hv9nb\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.735617 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-config-data\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.740950 4690 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-credential-keys\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.741474 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-combined-ca-bundle\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.741720 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-fernet-keys\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.747689 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-scripts\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.752153 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-config-data\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.753312 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv9nb\" (UniqueName: \"kubernetes.io/projected/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-kube-api-access-hv9nb\") pod \"keystone-bootstrap-42k9z\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:11 crc kubenswrapper[4690]: I0320 13:43:11.916256 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:12 crc kubenswrapper[4690]: I0320 13:43:12.432377 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa" path="/var/lib/kubelet/pods/dd44759e-3f7b-4b74-9ddc-94bc2dfc3dfa/volumes" Mar 20 13:43:13 crc kubenswrapper[4690]: I0320 13:43:13.503241 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: connect: connection refused" Mar 20 13:43:17 crc kubenswrapper[4690]: I0320 13:43:17.018597 4690 scope.go:117] "RemoveContainer" containerID="0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.045097 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.045387 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n566hf9h55dh79hfchch655h64ch664h658hb7h7ch5f9h697h56ch655h8fh7bh645h544h577h675h66fh56ch645h84h576h5cfh5cbhd4h584h696q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2kb26,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-589576fb47-w84wc_openstack(7826df84-54dc-49a4-9942-797331b72c57): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.048602 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: 
context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-589576fb47-w84wc" podUID="7826df84-54dc-49a4-9942-797331b72c57" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.054792 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.055105 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n574h559h59chbdh64h67h7h59hc6h64bh9dh6dhdbh78h65dh567h59fh5b8h566h654h66ch58ch5c5h84h668h55ch7bh654h595h56ch56bh599q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kv2nk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7969d5769c-bbm4p_openstack(793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.058020 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7969d5769c-bbm4p" podUID="793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.061769 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" 
image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.061972 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5bfh5fh599h74h77h9dhb9h596h569h586hbfh68ch655h5bfh58bh585h5b8h9bhbdh67bh674h65ch556h648h5dch565hf7h674h57bh689h546h55dq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-78q6f,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-5b696cf447-hntld_openstack(afc7da65-b3f8-4246-abfe-573fa358fa2d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 13:43:17 crc kubenswrapper[4690]: E0320 13:43:17.064276 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-5b696cf447-hntld" podUID="afc7da65-b3f8-4246-abfe-573fa358fa2d" Mar 20 13:43:17 crc kubenswrapper[4690]: I0320 13:43:17.586681 4690 generic.go:334] "Generic (PLEG): container finished" podID="badac960-83c0-4715-b125-0fdd44ae7315" containerID="6eb961c8d2c437e3c78c36daeeee88a831078f66dc0a9d85833a5bc4727e0274" exitCode=0 Mar 20 13:43:17 crc kubenswrapper[4690]: I0320 13:43:17.586877 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8h88w" event={"ID":"badac960-83c0-4715-b125-0fdd44ae7315","Type":"ContainerDied","Data":"6eb961c8d2c437e3c78c36daeeee88a831078f66dc0a9d85833a5bc4727e0274"} Mar 20 13:43:23 crc kubenswrapper[4690]: I0320 13:43:23.502182 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" 
podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Mar 20 13:43:23 crc kubenswrapper[4690]: I0320 13:43:23.502879 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:43:24 crc kubenswrapper[4690]: E0320 13:43:24.042295 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Mar 20 13:43:24 crc kubenswrapper[4690]: E0320 13:43:24.042495 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nbdh598h5f6h666h58bh688h67h65ch644h78hd4h5b6h5f9h566hcch5bch5dch565h7bh66fh89h669h57fh5cdhc4h5fbh594h5fbhd9h585hc4h8bq,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bk5j5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(93013757-d360-41e7-92a9-211155703015): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.177197 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.181946 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.186980 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.192379 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8h88w" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.207953 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.362783 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-config\") pod \"badac960-83c0-4715-b125-0fdd44ae7315\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.362836 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2kb26\" (UniqueName: \"kubernetes.io/projected/7826df84-54dc-49a4-9942-797331b72c57-kube-api-access-2kb26\") pod \"7826df84-54dc-49a4-9942-797331b72c57\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.362911 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-scripts\") pod \"7826df84-54dc-49a4-9942-797331b72c57\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.362941 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-config-data\") pod \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.362966 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-horizon-secret-key\") pod \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363012 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-swift-storage-0\") pod \"9e320fee-cd78-4d19-b2ac-23dd935a0894\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363027 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8g2d6\" (UniqueName: \"kubernetes.io/projected/9e320fee-cd78-4d19-b2ac-23dd935a0894-kube-api-access-8g2d6\") pod \"9e320fee-cd78-4d19-b2ac-23dd935a0894\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363056 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-config-data\") pod 
\"afc7da65-b3f8-4246-abfe-573fa358fa2d\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363082 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc7da65-b3f8-4246-abfe-573fa358fa2d-logs\") pod \"afc7da65-b3f8-4246-abfe-573fa358fa2d\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363102 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv2nk\" (UniqueName: \"kubernetes.io/projected/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-kube-api-access-kv2nk\") pod \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363125 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-config-data\") pod \"7826df84-54dc-49a4-9942-797331b72c57\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363226 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/afc7da65-b3f8-4246-abfe-573fa358fa2d-horizon-secret-key\") pod \"afc7da65-b3f8-4246-abfe-573fa358fa2d\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363266 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-svc\") pod \"9e320fee-cd78-4d19-b2ac-23dd935a0894\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363295 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-sb\") pod \"9e320fee-cd78-4d19-b2ac-23dd935a0894\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363314 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7826df84-54dc-49a4-9942-797331b72c57-logs\") pod \"7826df84-54dc-49a4-9942-797331b72c57\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363330 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-nb\") pod \"9e320fee-cd78-4d19-b2ac-23dd935a0894\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363351 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-78q6f\" (UniqueName: \"kubernetes.io/projected/afc7da65-b3f8-4246-abfe-573fa358fa2d-kube-api-access-78q6f\") pod \"afc7da65-b3f8-4246-abfe-573fa358fa2d\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363372 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7826df84-54dc-49a4-9942-797331b72c57-horizon-secret-key\") pod 
\"7826df84-54dc-49a4-9942-797331b72c57\" (UID: \"7826df84-54dc-49a4-9942-797331b72c57\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363396 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-config\") pod \"9e320fee-cd78-4d19-b2ac-23dd935a0894\" (UID: \"9e320fee-cd78-4d19-b2ac-23dd935a0894\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363417 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-scripts\") pod \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363442 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-drdb4\" (UniqueName: \"kubernetes.io/projected/badac960-83c0-4715-b125-0fdd44ae7315-kube-api-access-drdb4\") pod \"badac960-83c0-4715-b125-0fdd44ae7315\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363466 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-combined-ca-bundle\") pod \"badac960-83c0-4715-b125-0fdd44ae7315\" (UID: \"badac960-83c0-4715-b125-0fdd44ae7315\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363489 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-scripts\") pod \"afc7da65-b3f8-4246-abfe-573fa358fa2d\" (UID: \"afc7da65-b3f8-4246-abfe-573fa358fa2d\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.363508 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-logs\") pod \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\" (UID: \"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08\") " Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.364228 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-logs" (OuterVolumeSpecName: "logs") pod "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08" (UID: "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.369329 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-scripts" (OuterVolumeSpecName: "scripts") pod "7826df84-54dc-49a4-9942-797331b72c57" (UID: "7826df84-54dc-49a4-9942-797331b72c57"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.370339 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/afc7da65-b3f8-4246-abfe-573fa358fa2d-logs" (OuterVolumeSpecName: "logs") pod "afc7da65-b3f8-4246-abfe-573fa358fa2d" (UID: "afc7da65-b3f8-4246-abfe-573fa358fa2d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.373214 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-config-data" (OuterVolumeSpecName: "config-data") pod "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08" (UID: "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.373759 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-config-data" (OuterVolumeSpecName: "config-data") pod "afc7da65-b3f8-4246-abfe-573fa358fa2d" (UID: "afc7da65-b3f8-4246-abfe-573fa358fa2d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.376399 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-scripts" (OuterVolumeSpecName: "scripts") pod "afc7da65-b3f8-4246-abfe-573fa358fa2d" (UID: "afc7da65-b3f8-4246-abfe-573fa358fa2d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.378276 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-scripts" (OuterVolumeSpecName: "scripts") pod "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08" (UID: "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.378468 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-config-data" (OuterVolumeSpecName: "config-data") pod "7826df84-54dc-49a4-9942-797331b72c57" (UID: "7826df84-54dc-49a4-9942-797331b72c57"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.379195 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc7da65-b3f8-4246-abfe-573fa358fa2d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "afc7da65-b3f8-4246-abfe-573fa358fa2d" (UID: "afc7da65-b3f8-4246-abfe-573fa358fa2d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.386559 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7826df84-54dc-49a4-9942-797331b72c57-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "7826df84-54dc-49a4-9942-797331b72c57" (UID: "7826df84-54dc-49a4-9942-797331b72c57"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.387005 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7826df84-54dc-49a4-9942-797331b72c57-logs" (OuterVolumeSpecName: "logs") pod "7826df84-54dc-49a4-9942-797331b72c57" (UID: "7826df84-54dc-49a4-9942-797331b72c57"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.391089 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7826df84-54dc-49a4-9942-797331b72c57-kube-api-access-2kb26" (OuterVolumeSpecName: "kube-api-access-2kb26") pod "7826df84-54dc-49a4-9942-797331b72c57" (UID: "7826df84-54dc-49a4-9942-797331b72c57"). InnerVolumeSpecName "kube-api-access-2kb26". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.404462 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-kube-api-access-kv2nk" (OuterVolumeSpecName: "kube-api-access-kv2nk") pod "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08" (UID: "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08"). InnerVolumeSpecName "kube-api-access-kv2nk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.412079 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/badac960-83c0-4715-b125-0fdd44ae7315-kube-api-access-drdb4" (OuterVolumeSpecName: "kube-api-access-drdb4") pod "badac960-83c0-4715-b125-0fdd44ae7315" (UID: "badac960-83c0-4715-b125-0fdd44ae7315"). InnerVolumeSpecName "kube-api-access-drdb4". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.412138 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e320fee-cd78-4d19-b2ac-23dd935a0894-kube-api-access-8g2d6" (OuterVolumeSpecName: "kube-api-access-8g2d6") pod "9e320fee-cd78-4d19-b2ac-23dd935a0894" (UID: "9e320fee-cd78-4d19-b2ac-23dd935a0894"). InnerVolumeSpecName "kube-api-access-8g2d6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.412213 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc7da65-b3f8-4246-abfe-573fa358fa2d-kube-api-access-78q6f" (OuterVolumeSpecName: "kube-api-access-78q6f") pod "afc7da65-b3f8-4246-abfe-573fa358fa2d" (UID: "afc7da65-b3f8-4246-abfe-573fa358fa2d"). InnerVolumeSpecName "kube-api-access-78q6f". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.424897 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08" (UID: "793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.458152 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-config" (OuterVolumeSpecName: "config") pod "badac960-83c0-4715-b125-0fdd44ae7315" (UID: "badac960-83c0-4715-b125-0fdd44ae7315"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.459086 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "badac960-83c0-4715-b125-0fdd44ae7315" (UID: "badac960-83c0-4715-b125-0fdd44ae7315"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465754 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-78q6f\" (UniqueName: \"kubernetes.io/projected/afc7da65-b3f8-4246-abfe-573fa358fa2d-kube-api-access-78q6f\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465782 4690 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/7826df84-54dc-49a4-9942-797331b72c57-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465791 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465799 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-drdb4\" (UniqueName: \"kubernetes.io/projected/badac960-83c0-4715-b125-0fdd44ae7315-kube-api-access-drdb4\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465808 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465818 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465826 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465835 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/badac960-83c0-4715-b125-0fdd44ae7315-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465845 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2kb26\" (UniqueName: \"kubernetes.io/projected/7826df84-54dc-49a4-9942-797331b72c57-kube-api-access-2kb26\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465853 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465872 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465880 4690 reconciler_common.go:293] "Volume detached for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465888 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8g2d6\" (UniqueName: \"kubernetes.io/projected/9e320fee-cd78-4d19-b2ac-23dd935a0894-kube-api-access-8g2d6\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465896 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/afc7da65-b3f8-4246-abfe-573fa358fa2d-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465904 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/afc7da65-b3f8-4246-abfe-573fa358fa2d-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465912 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv2nk\" (UniqueName: \"kubernetes.io/projected/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08-kube-api-access-kv2nk\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465920 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7826df84-54dc-49a4-9942-797331b72c57-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465929 4690 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/afc7da65-b3f8-4246-abfe-573fa358fa2d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.465936 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7826df84-54dc-49a4-9942-797331b72c57-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.475079 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "9e320fee-cd78-4d19-b2ac-23dd935a0894" (UID: "9e320fee-cd78-4d19-b2ac-23dd935a0894"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.501108 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-config" (OuterVolumeSpecName: "config") pod "9e320fee-cd78-4d19-b2ac-23dd935a0894" (UID: "9e320fee-cd78-4d19-b2ac-23dd935a0894"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.508843 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "9e320fee-cd78-4d19-b2ac-23dd935a0894" (UID: "9e320fee-cd78-4d19-b2ac-23dd935a0894"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.510593 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "9e320fee-cd78-4d19-b2ac-23dd935a0894" (UID: "9e320fee-cd78-4d19-b2ac-23dd935a0894"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.518826 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9e320fee-cd78-4d19-b2ac-23dd935a0894" (UID: "9e320fee-cd78-4d19-b2ac-23dd935a0894"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.567802 4690 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.567846 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.567871 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.567884 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.567893 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9e320fee-cd78-4d19-b2ac-23dd935a0894-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.643328 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-589576fb47-w84wc" event={"ID":"7826df84-54dc-49a4-9942-797331b72c57","Type":"ContainerDied","Data":"b7a6b8a23c92215aab3297e3b36133e87b429cf753d8da496e7a6fe288bf7434"} Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.643422 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-589576fb47-w84wc" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.645056 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-5b696cf447-hntld" event={"ID":"afc7da65-b3f8-4246-abfe-573fa358fa2d","Type":"ContainerDied","Data":"5068568503ec6c9eddd54b676a7bea8fb8b30d5df1bb5cae2e01c0425fcb42c8"} Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.645082 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-5b696cf447-hntld" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.647314 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8h88w" event={"ID":"badac960-83c0-4715-b125-0fdd44ae7315","Type":"ContainerDied","Data":"3c7bb7f5ff6cdc8da2e14b01d1d80c747fafba1f33497349ccc7de5465f7119d"} Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.647341 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c7bb7f5ff6cdc8da2e14b01d1d80c747fafba1f33497349ccc7de5465f7119d" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.647471 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8h88w" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.660399 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" event={"ID":"9e320fee-cd78-4d19-b2ac-23dd935a0894","Type":"ContainerDied","Data":"8022aea8874a090584de67eca1a937125334d4d6970da8e98a835da26f02275f"} Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.660495 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.662172 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7969d5769c-bbm4p" Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.662213 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7969d5769c-bbm4p" event={"ID":"793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08","Type":"ContainerDied","Data":"eaf515275b9baab121378b401e0e4035923ed6c6dd8a4c6b9155d133222d7b59"} Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.727524 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-589576fb47-w84wc"] Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.736168 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-589576fb47-w84wc"] Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.749535 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-5b696cf447-hntld"] Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.757692 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-5b696cf447-hntld"] Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.764402 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-fdmgt"] Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.773122 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5f59b8f679-fdmgt"] Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.788375 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7969d5769c-bbm4p"] Mar 20 13:43:24 crc kubenswrapper[4690]: I0320 13:43:24.795217 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7969d5769c-bbm4p"] Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.483265 4690 scope.go:117] "RemoveContainer" containerID="bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.486968 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c\": container with ID starting with 
bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c not found: ID does not exist" containerID="bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.487044 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c"} err="failed to get container status \"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c\": rpc error: code = NotFound desc = could not find container \"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c\": container with ID starting with bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c not found: ID does not exist" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.487078 4690 scope.go:117] "RemoveContainer" containerID="0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.489155 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3\": container with ID starting with 0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3 not found: ID does not exist" containerID="0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.489219 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3"} err="failed to get container status \"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3\": rpc error: code = NotFound desc = could not find container \"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3\": container with ID starting with 0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3 not found: ID does not exist" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.489246 4690 scope.go:117] "RemoveContainer" containerID="bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.490484 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c"} err="failed to get container status \"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c\": rpc error: code = NotFound desc = could not find container \"bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c\": container with ID starting with bbb3871f1775e3140404693b0d33b2a38d21c47412cd2682b0e5d51f4bafd51c not found: ID does not exist" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.490515 4690 scope.go:117] "RemoveContainer" containerID="0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.491309 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3"} err="failed to get container status \"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3\": rpc error: code = NotFound desc = could not find container \"0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3\": container with ID starting with 0c514ee3ac35df6273a9d5413fca8047fb81c31eb1ebc270bc9b4e02a95816a3 not found: ID does not exist" Mar 20 
13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.491335 4690 scope.go:117] "RemoveContainer" containerID="0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.492440 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-h7ptl"] Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.494788 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="dnsmasq-dns" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.494813 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="dnsmasq-dns" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.494829 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badac960-83c0-4715-b125-0fdd44ae7315" containerName="neutron-db-sync" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.494836 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="badac960-83c0-4715-b125-0fdd44ae7315" containerName="neutron-db-sync" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.494873 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="init" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.494880 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="init" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.495035 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="badac960-83c0-4715-b125-0fdd44ae7315" containerName="neutron-db-sync" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.495062 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="dnsmasq-dns" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.495874 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.527120 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-h7ptl"] Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.569252 4690 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.569438 4690 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mdcnd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-6rgrr_openstack(3def27d2-bdda-4c07-b4b2-f695994bd509): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.570757 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-6rgrr" podUID="3def27d2-bdda-4c07-b4b2-f695994bd509" Mar 
20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.636514 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-65b6bf5884-9kvqh"] Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.638265 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.640839 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.642972 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.643251 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-k2q5j" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.644040 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.653890 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-65b6bf5884-9kvqh"] Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.689312 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qkzdp\" (UniqueName: \"kubernetes.io/projected/5e266b86-47ff-435f-b619-baa374a78476-kube-api-access-qkzdp\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.689348 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.689372 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.689424 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-config\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.689484 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-svc\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.689504 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " 
pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.701710 4690 scope.go:117] "RemoveContainer" containerID="8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.751752 4690 scope.go:117] "RemoveContainer" containerID="0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.751873 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-6rgrr" podUID="3def27d2-bdda-4c07-b4b2-f695994bd509" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.752374 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4\": container with ID starting with 0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4 not found: ID does not exist" containerID="0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.752413 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4"} err="failed to get container status \"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4\": rpc error: code = NotFound desc = could not find container \"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4\": container with ID starting with 0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4 not found: ID does not exist" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.752440 4690 scope.go:117] "RemoveContainer" containerID="8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d" Mar 20 13:43:25 crc kubenswrapper[4690]: E0320 13:43:25.754433 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d\": container with ID starting with 8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d not found: ID does not exist" containerID="8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.754470 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d"} err="failed to get container status \"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d\": rpc error: code = NotFound desc = could not find container \"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d\": container with ID starting with 8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d not found: ID does not exist" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.754495 4690 scope.go:117] "RemoveContainer" containerID="0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.754752 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4"} err="failed to get container status 
\"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4\": rpc error: code = NotFound desc = could not find container \"0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4\": container with ID starting with 0f04bdcb570296299af4ffc1b0261283185366ddbf70d15a9caba690c913eea4 not found: ID does not exist" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.754769 4690 scope.go:117] "RemoveContainer" containerID="8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.755009 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d"} err="failed to get container status \"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d\": rpc error: code = NotFound desc = could not find container \"8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d\": container with ID starting with 8acc831022bec294eb43394143012e3d1ca9787345c92b958218c7089623457d not found: ID does not exist" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.755025 4690 scope.go:117] "RemoveContainer" containerID="b609a331b6c8b2fcb43e5bb73f626c651e6f9c616a53f06abf6ef6b7cd424c9e" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.791694 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qkzdp\" (UniqueName: \"kubernetes.io/projected/5e266b86-47ff-435f-b619-baa374a78476-kube-api-access-qkzdp\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.791733 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-config\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.791770 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.791790 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.791861 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-httpd-config\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.792081 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-config\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: 
\"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.792236 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrt2d\" (UniqueName: \"kubernetes.io/projected/70037527-dc76-4c31-9841-6cc6b27fe032-kube-api-access-zrt2d\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.792496 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-svc\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.792559 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.792599 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-combined-ca-bundle\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.793283 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-config\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.793326 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-swift-storage-0\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.796692 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-nb\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.798614 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-sb\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.798655 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-svc\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " 
pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.805000 4690 scope.go:117] "RemoveContainer" containerID="a664555c062840e44661f8ab5f0afa2c9f8b795f7a48ba1480cecbe44fc15405" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.812077 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-ovndb-tls-certs\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.815382 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qkzdp\" (UniqueName: \"kubernetes.io/projected/5e266b86-47ff-435f-b619-baa374a78476-kube-api-access-qkzdp\") pod \"dnsmasq-dns-6b7b667979-h7ptl\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.913123 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-config\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.913501 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-httpd-config\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.913532 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrt2d\" (UniqueName: \"kubernetes.io/projected/70037527-dc76-4c31-9841-6cc6b27fe032-kube-api-access-zrt2d\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.913613 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-combined-ca-bundle\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.913647 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-ovndb-tls-certs\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.917796 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-ovndb-tls-certs\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.918536 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-httpd-config\") pod 
\"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.920495 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-combined-ca-bundle\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.921206 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-config\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.929390 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrt2d\" (UniqueName: \"kubernetes.io/projected/70037527-dc76-4c31-9841-6cc6b27fe032-kube-api-access-zrt2d\") pod \"neutron-65b6bf5884-9kvqh\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.942506 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:25 crc kubenswrapper[4690]: I0320 13:43:25.969258 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.161923 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7946cd7f64-rm6mr"] Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.166542 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-587c585984-xs7nl"] Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.264435 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.352251 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-42k9z"] Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.399032 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.426180 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7826df84-54dc-49a4-9942-797331b72c57" path="/var/lib/kubelet/pods/7826df84-54dc-49a4-9942-797331b72c57/volumes" Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.426706 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08" path="/var/lib/kubelet/pods/793dcbd3-c773-4de7-8d5d-b3c9a9d3ef08/volumes" Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.427169 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" path="/var/lib/kubelet/pods/9e320fee-cd78-4d19-b2ac-23dd935a0894/volumes" Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.428011 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="afc7da65-b3f8-4246-abfe-573fa358fa2d" path="/var/lib/kubelet/pods/afc7da65-b3f8-4246-abfe-573fa358fa2d/volumes" Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.604089 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/dnsmasq-dns-6b7b667979-h7ptl"] Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.689635 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-65b6bf5884-9kvqh"] Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.759270 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e43c11ca-5233-4250-b0cb-8b814c19f794","Type":"ContainerStarted","Data":"83e72daa340797776d14bb081bc062ddf6dea61eb03a8258a28499f50a3e0887"} Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.762591 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65b6bf5884-9kvqh" event={"ID":"70037527-dc76-4c31-9841-6cc6b27fe032","Type":"ContainerStarted","Data":"0cd787c465eb335457020ce6b22281ed4fdb63d2c9ee2d062ac8bd5a51c117b7"} Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.766550 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bsz48" event={"ID":"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9","Type":"ContainerStarted","Data":"f73270b307867eed494571e6f392667d0c3f6765b8a9e8edd0c26f70820ca006"} Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.769710 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-587c585984-xs7nl" event={"ID":"ae74738f-0b10-4955-97fb-e892ca7102a0","Type":"ContainerStarted","Data":"6fd144d885bf415183b901f7a932e68b590fe359126cb026f695a082096a10a7"} Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.771011 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vgnp6" event={"ID":"6629e615-4e98-4e99-b7dc-6990b379d93c","Type":"ContainerStarted","Data":"d2a041c9a49926b0aa0ee09c2bb260b6f40b3a0c827cc8216d752c83cf4b41d6"} Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.773086 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7946cd7f64-rm6mr" event={"ID":"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596","Type":"ContainerStarted","Data":"adf5273ff2397e3fe60c6597b6617547208286f28dab8397910de1a115e9ffcf"} Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.774829 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" event={"ID":"5e266b86-47ff-435f-b619-baa374a78476","Type":"ContainerStarted","Data":"e93664ffafe947d63a827b9e2cf5a737bd6d03b2d483f839fd47e310e2618aa7"} Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.780129 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c70c11e8-bf26-41f0-9ca7-d135428c216e","Type":"ContainerStarted","Data":"f4149e6dd62f913467199ed567fe0769f0f203045ed1356199fa28c8dd70a0ab"} Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.785495 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-bsz48" podStartSLOduration=5.039010608 podStartE2EDuration="29.785478477s" podCreationTimestamp="2026-03-20 13:42:57 +0000 UTC" firstStartedPulling="2026-03-20 13:42:59.310802333 +0000 UTC m=+1225.600402276" lastFinishedPulling="2026-03-20 13:43:24.057270212 +0000 UTC m=+1250.346870145" observedRunningTime="2026-03-20 13:43:26.780889136 +0000 UTC m=+1253.070489079" watchObservedRunningTime="2026-03-20 13:43:26.785478477 +0000 UTC m=+1253.075078420" Mar 20 13:43:26 crc kubenswrapper[4690]: I0320 13:43:26.800924 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-vgnp6" podStartSLOduration=5.063729616 
podStartE2EDuration="29.800907447s" podCreationTimestamp="2026-03-20 13:42:57 +0000 UTC" firstStartedPulling="2026-03-20 13:42:59.326334299 +0000 UTC m=+1225.615934242" lastFinishedPulling="2026-03-20 13:43:24.06351213 +0000 UTC m=+1250.353112073" observedRunningTime="2026-03-20 13:43:26.796894203 +0000 UTC m=+1253.086494166" watchObservedRunningTime="2026-03-20 13:43:26.800907447 +0000 UTC m=+1253.090507380" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.715985 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-59dcfcb47f-zsm8d"] Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.717546 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.725435 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.726666 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.752108 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-59dcfcb47f-zsm8d"] Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.779686 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-combined-ca-bundle\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.779732 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mljhd\" (UniqueName: \"kubernetes.io/projected/45b97511-9613-4868-844e-689823a4ae38-kube-api-access-mljhd\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.779773 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-config\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.779795 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-internal-tls-certs\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.779835 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-httpd-config\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.779879 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-public-tls-certs\") pod 
\"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.779911 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-ovndb-tls-certs\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.853712 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"93013757-d360-41e7-92a9-211155703015","Type":"ContainerStarted","Data":"898385b96ceec0a95ab4e0586069810b30d39cb8f8e881a7ffe396d66b2ca02f"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.857517 4690 generic.go:334] "Generic (PLEG): container finished" podID="5e266b86-47ff-435f-b619-baa374a78476" containerID="4c83374abc3b88f5cc4def22c450cd92748c8254e73dc65fb53053773873ecb4" exitCode=0 Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.857586 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" event={"ID":"5e266b86-47ff-435f-b619-baa374a78476","Type":"ContainerDied","Data":"4c83374abc3b88f5cc4def22c450cd92748c8254e73dc65fb53053773873ecb4"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.869217 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-587c585984-xs7nl" event={"ID":"ae74738f-0b10-4955-97fb-e892ca7102a0","Type":"ContainerStarted","Data":"b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.871230 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-42k9z" event={"ID":"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007","Type":"ContainerStarted","Data":"6111af7d9d2d98e5bb885344274a5405f8b1848e3df4d659b5d17f660ced3ac9"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.871256 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-42k9z" event={"ID":"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007","Type":"ContainerStarted","Data":"df299107022bc523892ceb69fbfffc3c3205b055c7a10aa57a3f4c6dfb9d1e0b"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.884738 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-ovndb-tls-certs\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.885831 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-combined-ca-bundle\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.885887 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mljhd\" (UniqueName: \"kubernetes.io/projected/45b97511-9613-4868-844e-689823a4ae38-kube-api-access-mljhd\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.885942 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-config\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.885973 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-internal-tls-certs\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.886085 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-httpd-config\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.886132 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-public-tls-certs\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.888765 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-ovndb-tls-certs\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.892255 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-public-tls-certs\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.893090 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-combined-ca-bundle\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.895920 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-config\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.896482 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-internal-tls-certs\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.903262 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-httpd-config\") pod 
\"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.904143 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c70c11e8-bf26-41f0-9ca7-d135428c216e","Type":"ContainerStarted","Data":"41e98532edb7d23551a797b20ca95a8585c0d31423aa05dcb3b3f34216d9a931"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.905662 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-42k9z" podStartSLOduration=16.905650318 podStartE2EDuration="16.905650318s" podCreationTimestamp="2026-03-20 13:43:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:27.904434403 +0000 UTC m=+1254.194034346" watchObservedRunningTime="2026-03-20 13:43:27.905650318 +0000 UTC m=+1254.195250271" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.913368 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mljhd\" (UniqueName: \"kubernetes.io/projected/45b97511-9613-4868-844e-689823a4ae38-kube-api-access-mljhd\") pod \"neutron-59dcfcb47f-zsm8d\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.932216 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7946cd7f64-rm6mr" event={"ID":"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596","Type":"ContainerStarted","Data":"af7b8484452247f0e4d3cbfead8e4badb5672eaf24081be08dfcfa68a943c54e"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.932264 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7946cd7f64-rm6mr" event={"ID":"ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596","Type":"ContainerStarted","Data":"c87f40225a9f0eb6c018207263fb33712c90b9a00467d1a98a0d28b4bc4ecddb"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.934545 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e43c11ca-5233-4250-b0cb-8b814c19f794","Type":"ContainerStarted","Data":"985ca9be3b374da921ccadaf27a55bca782edbe7d5e28d648f2032cc1faadab2"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.938069 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65b6bf5884-9kvqh" event={"ID":"70037527-dc76-4c31-9841-6cc6b27fe032","Type":"ContainerStarted","Data":"beccd301a63fd5b4871cde64deb7972cdfb711b02983ef69bd12f554c35d77ad"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.938180 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65b6bf5884-9kvqh" event={"ID":"70037527-dc76-4c31-9841-6cc6b27fe032","Type":"ContainerStarted","Data":"1ee497351d53e8a9f9a8191685b5badaba8117a7b558f7bb5ab2d0b931252a50"} Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.938313 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:27 crc kubenswrapper[4690]: I0320 13:43:27.978715 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-7946cd7f64-rm6mr" podStartSLOduration=21.28303812 podStartE2EDuration="21.978697312s" podCreationTimestamp="2026-03-20 13:43:06 +0000 UTC" firstStartedPulling="2026-03-20 13:43:26.175254962 +0000 UTC m=+1252.464854905" lastFinishedPulling="2026-03-20 13:43:26.870914154 
+0000 UTC m=+1253.160514097" observedRunningTime="2026-03-20 13:43:27.971030293 +0000 UTC m=+1254.260630236" watchObservedRunningTime="2026-03-20 13:43:27.978697312 +0000 UTC m=+1254.268297255" Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:27.999628 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-65b6bf5884-9kvqh" podStartSLOduration=2.999605858 podStartE2EDuration="2.999605858s" podCreationTimestamp="2026-03-20 13:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:27.999421953 +0000 UTC m=+1254.289021896" watchObservedRunningTime="2026-03-20 13:43:27.999605858 +0000 UTC m=+1254.289205801" Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.115642 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.503088 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-5f59b8f679-fdmgt" podUID="9e320fee-cd78-4d19-b2ac-23dd935a0894" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.129:5353: i/o timeout" Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.721670 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-59dcfcb47f-zsm8d"] Mar 20 13:43:28 crc kubenswrapper[4690]: W0320 13:43:28.744036 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod45b97511_9613_4868_844e_689823a4ae38.slice/crio-13a7fb714bb3d3e8957d5ecf719180866fb096de275689d50bff70d6fb4ec9eb WatchSource:0}: Error finding container 13a7fb714bb3d3e8957d5ecf719180866fb096de275689d50bff70d6fb4ec9eb: Status 404 returned error can't find the container with id 13a7fb714bb3d3e8957d5ecf719180866fb096de275689d50bff70d6fb4ec9eb Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.948590 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59dcfcb47f-zsm8d" event={"ID":"45b97511-9613-4868-844e-689823a4ae38","Type":"ContainerStarted","Data":"13a7fb714bb3d3e8957d5ecf719180866fb096de275689d50bff70d6fb4ec9eb"} Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.951047 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-587c585984-xs7nl" event={"ID":"ae74738f-0b10-4955-97fb-e892ca7102a0","Type":"ContainerStarted","Data":"05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34"} Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.955465 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c70c11e8-bf26-41f0-9ca7-d135428c216e","Type":"ContainerStarted","Data":"6994593b0fe37bac76e1ada026777d3e58ac8c0b7a2ab2baa1b2069a8ad46fdc"} Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.955640 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerName="glance-httpd" containerID="cri-o://6994593b0fe37bac76e1ada026777d3e58ac8c0b7a2ab2baa1b2069a8ad46fdc" gracePeriod=30 Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.955503 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerName="glance-log" 
containerID="cri-o://41e98532edb7d23551a797b20ca95a8585c0d31423aa05dcb3b3f34216d9a931" gracePeriod=30 Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.963074 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e43c11ca-5233-4250-b0cb-8b814c19f794","Type":"ContainerStarted","Data":"f2bcd13849dd3e4b0c80bfebb8ecd8a8c87ce6e3cc3d5b7e99ba7317d8db1be2"} Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.963211 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerName="glance-log" containerID="cri-o://985ca9be3b374da921ccadaf27a55bca782edbe7d5e28d648f2032cc1faadab2" gracePeriod=30 Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.963398 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerName="glance-httpd" containerID="cri-o://f2bcd13849dd3e4b0c80bfebb8ecd8a8c87ce6e3cc3d5b7e99ba7317d8db1be2" gracePeriod=30 Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.979035 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-587c585984-xs7nl" podStartSLOduration=22.33521174 podStartE2EDuration="22.979018884s" podCreationTimestamp="2026-03-20 13:43:06 +0000 UTC" firstStartedPulling="2026-03-20 13:43:26.185923506 +0000 UTC m=+1252.475523449" lastFinishedPulling="2026-03-20 13:43:26.82973065 +0000 UTC m=+1253.119330593" observedRunningTime="2026-03-20 13:43:28.976556373 +0000 UTC m=+1255.266156316" watchObservedRunningTime="2026-03-20 13:43:28.979018884 +0000 UTC m=+1255.268618827" Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.996696 4690 generic.go:334] "Generic (PLEG): container finished" podID="d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" containerID="f73270b307867eed494571e6f392667d0c3f6765b8a9e8edd0c26f70820ca006" exitCode=0 Mar 20 13:43:28 crc kubenswrapper[4690]: I0320 13:43:28.996839 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bsz48" event={"ID":"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9","Type":"ContainerDied","Data":"f73270b307867eed494571e6f392667d0c3f6765b8a9e8edd0c26f70820ca006"} Mar 20 13:43:29 crc kubenswrapper[4690]: I0320 13:43:29.003031 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=26.002998928 podStartE2EDuration="26.002998928s" podCreationTimestamp="2026-03-20 13:43:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:28.999287172 +0000 UTC m=+1255.288887115" watchObservedRunningTime="2026-03-20 13:43:29.002998928 +0000 UTC m=+1255.292598871" Mar 20 13:43:29 crc kubenswrapper[4690]: I0320 13:43:29.004627 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" event={"ID":"5e266b86-47ff-435f-b619-baa374a78476","Type":"ContainerStarted","Data":"ea454ed32cf5432c0ce784a14841615e0b4764b96a40182ae7f47a05e4b4447a"} Mar 20 13:43:29 crc kubenswrapper[4690]: I0320 13:43:29.004678 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:29 crc kubenswrapper[4690]: I0320 13:43:29.040275 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/glance-default-internal-api-0" podStartSLOduration=26.04025491 podStartE2EDuration="26.04025491s" podCreationTimestamp="2026-03-20 13:43:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:29.025730826 +0000 UTC m=+1255.315330769" watchObservedRunningTime="2026-03-20 13:43:29.04025491 +0000 UTC m=+1255.329854853" Mar 20 13:43:29 crc kubenswrapper[4690]: I0320 13:43:29.075444 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" podStartSLOduration=4.075421923 podStartE2EDuration="4.075421923s" podCreationTimestamp="2026-03-20 13:43:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:29.069451453 +0000 UTC m=+1255.359051396" watchObservedRunningTime="2026-03-20 13:43:29.075421923 +0000 UTC m=+1255.365021866" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.036727 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59dcfcb47f-zsm8d" event={"ID":"45b97511-9613-4868-844e-689823a4ae38","Type":"ContainerStarted","Data":"20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58"} Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.037087 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59dcfcb47f-zsm8d" event={"ID":"45b97511-9613-4868-844e-689823a4ae38","Type":"ContainerStarted","Data":"2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c"} Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.037117 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.044893 4690 generic.go:334] "Generic (PLEG): container finished" podID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerID="6994593b0fe37bac76e1ada026777d3e58ac8c0b7a2ab2baa1b2069a8ad46fdc" exitCode=0 Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.044927 4690 generic.go:334] "Generic (PLEG): container finished" podID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerID="41e98532edb7d23551a797b20ca95a8585c0d31423aa05dcb3b3f34216d9a931" exitCode=143 Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.045067 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c70c11e8-bf26-41f0-9ca7-d135428c216e","Type":"ContainerDied","Data":"6994593b0fe37bac76e1ada026777d3e58ac8c0b7a2ab2baa1b2069a8ad46fdc"} Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.045099 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c70c11e8-bf26-41f0-9ca7-d135428c216e","Type":"ContainerDied","Data":"41e98532edb7d23551a797b20ca95a8585c0d31423aa05dcb3b3f34216d9a931"} Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.070004 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-59dcfcb47f-zsm8d" podStartSLOduration=3.06998309 podStartE2EDuration="3.06998309s" podCreationTimestamp="2026-03-20 13:43:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:30.068635632 +0000 UTC m=+1256.358235585" watchObservedRunningTime="2026-03-20 13:43:30.06998309 +0000 UTC m=+1256.359583033" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 
13:43:30.077693 4690 generic.go:334] "Generic (PLEG): container finished" podID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerID="f2bcd13849dd3e4b0c80bfebb8ecd8a8c87ce6e3cc3d5b7e99ba7317d8db1be2" exitCode=0 Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.077753 4690 generic.go:334] "Generic (PLEG): container finished" podID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerID="985ca9be3b374da921ccadaf27a55bca782edbe7d5e28d648f2032cc1faadab2" exitCode=143 Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.078052 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e43c11ca-5233-4250-b0cb-8b814c19f794","Type":"ContainerDied","Data":"f2bcd13849dd3e4b0c80bfebb8ecd8a8c87ce6e3cc3d5b7e99ba7317d8db1be2"} Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.078081 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e43c11ca-5233-4250-b0cb-8b814c19f794","Type":"ContainerDied","Data":"985ca9be3b374da921ccadaf27a55bca782edbe7d5e28d648f2032cc1faadab2"} Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.339937 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.437932 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-config-data\") pod \"e43c11ca-5233-4250-b0cb-8b814c19f794\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.438475 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-combined-ca-bundle\") pod \"e43c11ca-5233-4250-b0cb-8b814c19f794\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.438672 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-httpd-run\") pod \"e43c11ca-5233-4250-b0cb-8b814c19f794\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.439009 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-scripts\") pod \"e43c11ca-5233-4250-b0cb-8b814c19f794\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.439090 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-svszk\" (UniqueName: \"kubernetes.io/projected/e43c11ca-5233-4250-b0cb-8b814c19f794-kube-api-access-svszk\") pod \"e43c11ca-5233-4250-b0cb-8b814c19f794\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.439672 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-internal-tls-certs\") pod \"e43c11ca-5233-4250-b0cb-8b814c19f794\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.439920 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" 
(UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"e43c11ca-5233-4250-b0cb-8b814c19f794\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.439974 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-logs\") pod \"e43c11ca-5233-4250-b0cb-8b814c19f794\" (UID: \"e43c11ca-5233-4250-b0cb-8b814c19f794\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.441549 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-logs" (OuterVolumeSpecName: "logs") pod "e43c11ca-5233-4250-b0cb-8b814c19f794" (UID: "e43c11ca-5233-4250-b0cb-8b814c19f794"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.460175 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e43c11ca-5233-4250-b0cb-8b814c19f794-kube-api-access-svszk" (OuterVolumeSpecName: "kube-api-access-svszk") pod "e43c11ca-5233-4250-b0cb-8b814c19f794" (UID: "e43c11ca-5233-4250-b0cb-8b814c19f794"). InnerVolumeSpecName "kube-api-access-svszk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.461475 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "e43c11ca-5233-4250-b0cb-8b814c19f794" (UID: "e43c11ca-5233-4250-b0cb-8b814c19f794"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.465150 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-scripts" (OuterVolumeSpecName: "scripts") pod "e43c11ca-5233-4250-b0cb-8b814c19f794" (UID: "e43c11ca-5233-4250-b0cb-8b814c19f794"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.473343 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "e43c11ca-5233-4250-b0cb-8b814c19f794" (UID: "e43c11ca-5233-4250-b0cb-8b814c19f794"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.488142 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e43c11ca-5233-4250-b0cb-8b814c19f794" (UID: "e43c11ca-5233-4250-b0cb-8b814c19f794"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.520262 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-config-data" (OuterVolumeSpecName: "config-data") pod "e43c11ca-5233-4250-b0cb-8b814c19f794" (UID: "e43c11ca-5233-4250-b0cb-8b814c19f794"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.525520 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bsz48" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.540232 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "e43c11ca-5233-4250-b0cb-8b814c19f794" (UID: "e43c11ca-5233-4250-b0cb-8b814c19f794"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.542604 4690 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.542622 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.542632 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.542642 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.542652 4690 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e43c11ca-5233-4250-b0cb-8b814c19f794-httpd-run\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.542661 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.542670 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-svszk\" (UniqueName: \"kubernetes.io/projected/e43c11ca-5233-4250-b0cb-8b814c19f794-kube-api-access-svszk\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.542678 4690 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/e43c11ca-5233-4250-b0cb-8b814c19f794-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.603346 4690 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.643725 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mp9kl\" (UniqueName: \"kubernetes.io/projected/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-kube-api-access-mp9kl\") pod \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.643792 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-scripts\") pod \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.643840 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-logs\") pod \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.643889 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-config-data\") pod \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.643954 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-combined-ca-bundle\") pod \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\" (UID: \"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.644409 4690 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.648503 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-logs" (OuterVolumeSpecName: "logs") pod "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" (UID: "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.648895 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-scripts" (OuterVolumeSpecName: "scripts") pod "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" (UID: "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.650625 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-kube-api-access-mp9kl" (OuterVolumeSpecName: "kube-api-access-mp9kl") pod "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" (UID: "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9"). InnerVolumeSpecName "kube-api-access-mp9kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.672010 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" (UID: "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.679560 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-config-data" (OuterVolumeSpecName: "config-data") pod "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" (UID: "d6a06dc2-5128-47d4-a10a-e2ba196ec0c9"). 
InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.745929 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mp9kl\" (UniqueName: \"kubernetes.io/projected/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-kube-api-access-mp9kl\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.745965 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.745975 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.745984 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.745994 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.790565 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.848395 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-scripts\") pod \"c70c11e8-bf26-41f0-9ca7-d135428c216e\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.848432 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-combined-ca-bundle\") pod \"c70c11e8-bf26-41f0-9ca7-d135428c216e\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.848463 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4qrzg\" (UniqueName: \"kubernetes.io/projected/c70c11e8-bf26-41f0-9ca7-d135428c216e-kube-api-access-4qrzg\") pod \"c70c11e8-bf26-41f0-9ca7-d135428c216e\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.848501 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-httpd-run\") pod \"c70c11e8-bf26-41f0-9ca7-d135428c216e\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.848517 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-logs\") pod \"c70c11e8-bf26-41f0-9ca7-d135428c216e\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.848540 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-config-data\") pod \"c70c11e8-bf26-41f0-9ca7-d135428c216e\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.848629 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-public-tls-certs\") pod \"c70c11e8-bf26-41f0-9ca7-d135428c216e\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.848660 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"c70c11e8-bf26-41f0-9ca7-d135428c216e\" (UID: \"c70c11e8-bf26-41f0-9ca7-d135428c216e\") " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.849249 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c70c11e8-bf26-41f0-9ca7-d135428c216e" (UID: "c70c11e8-bf26-41f0-9ca7-d135428c216e"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.852796 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-logs" (OuterVolumeSpecName: "logs") pod "c70c11e8-bf26-41f0-9ca7-d135428c216e" (UID: "c70c11e8-bf26-41f0-9ca7-d135428c216e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.853966 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "c70c11e8-bf26-41f0-9ca7-d135428c216e" (UID: "c70c11e8-bf26-41f0-9ca7-d135428c216e"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.856083 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c70c11e8-bf26-41f0-9ca7-d135428c216e-kube-api-access-4qrzg" (OuterVolumeSpecName: "kube-api-access-4qrzg") pod "c70c11e8-bf26-41f0-9ca7-d135428c216e" (UID: "c70c11e8-bf26-41f0-9ca7-d135428c216e"). InnerVolumeSpecName "kube-api-access-4qrzg". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.857018 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-scripts" (OuterVolumeSpecName: "scripts") pod "c70c11e8-bf26-41f0-9ca7-d135428c216e" (UID: "c70c11e8-bf26-41f0-9ca7-d135428c216e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.878161 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c70c11e8-bf26-41f0-9ca7-d135428c216e" (UID: "c70c11e8-bf26-41f0-9ca7-d135428c216e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.897162 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-config-data" (OuterVolumeSpecName: "config-data") pod "c70c11e8-bf26-41f0-9ca7-d135428c216e" (UID: "c70c11e8-bf26-41f0-9ca7-d135428c216e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.902345 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "c70c11e8-bf26-41f0-9ca7-d135428c216e" (UID: "c70c11e8-bf26-41f0-9ca7-d135428c216e"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.951005 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.951068 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.951079 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4qrzg\" (UniqueName: \"kubernetes.io/projected/c70c11e8-bf26-41f0-9ca7-d135428c216e-kube-api-access-4qrzg\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.951088 4690 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-httpd-run\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.951096 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c70c11e8-bf26-41f0-9ca7-d135428c216e-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.951105 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.951114 4690 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c70c11e8-bf26-41f0-9ca7-d135428c216e-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.951139 4690 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Mar 20 13:43:30 crc kubenswrapper[4690]: I0320 13:43:30.966103 4690 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.052490 4690 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.098776 
4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"e43c11ca-5233-4250-b0cb-8b814c19f794","Type":"ContainerDied","Data":"83e72daa340797776d14bb081bc062ddf6dea61eb03a8258a28499f50a3e0887"} Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.098825 4690 scope.go:117] "RemoveContainer" containerID="f2bcd13849dd3e4b0c80bfebb8ecd8a8c87ce6e3cc3d5b7e99ba7317d8db1be2" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.098971 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.129397 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-bsz48" event={"ID":"d6a06dc2-5128-47d4-a10a-e2ba196ec0c9","Type":"ContainerDied","Data":"ee8f55ed9458a8845e372ebedb8d7937bf996410ea0089a7730d80120f99488f"} Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.129441 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee8f55ed9458a8845e372ebedb8d7937bf996410ea0089a7730d80120f99488f" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.129534 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-bsz48" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.144455 4690 generic.go:334] "Generic (PLEG): container finished" podID="6629e615-4e98-4e99-b7dc-6990b379d93c" containerID="d2a041c9a49926b0aa0ee09c2bb260b6f40b3a0c827cc8216d752c83cf4b41d6" exitCode=0 Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.144678 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vgnp6" event={"ID":"6629e615-4e98-4e99-b7dc-6990b379d93c","Type":"ContainerDied","Data":"d2a041c9a49926b0aa0ee09c2bb260b6f40b3a0c827cc8216d752c83cf4b41d6"} Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.153491 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.157601 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c70c11e8-bf26-41f0-9ca7-d135428c216e","Type":"ContainerDied","Data":"f4149e6dd62f913467199ed567fe0769f0f203045ed1356199fa28c8dd70a0ab"} Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.248511 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-94bccb6f6-kk87d"] Mar 20 13:43:31 crc kubenswrapper[4690]: E0320 13:43:31.260723 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerName="glance-httpd" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.260776 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerName="glance-httpd" Mar 20 13:43:31 crc kubenswrapper[4690]: E0320 13:43:31.260799 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerName="glance-log" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.260807 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerName="glance-log" Mar 20 13:43:31 crc kubenswrapper[4690]: E0320 13:43:31.260820 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerName="glance-httpd" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.260827 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerName="glance-httpd" Mar 20 13:43:31 crc kubenswrapper[4690]: E0320 13:43:31.260878 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" containerName="placement-db-sync" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.260888 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" containerName="placement-db-sync" Mar 20 13:43:31 crc kubenswrapper[4690]: E0320 13:43:31.260906 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerName="glance-log" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.260916 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerName="glance-log" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.261209 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" containerName="placement-db-sync" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.261257 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerName="glance-httpd" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.261273 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerName="glance-log" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.261285 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" containerName="glance-httpd" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.261303 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" containerName="glance-log" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 
13:43:31.268659 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.276002 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.276310 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.276422 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-cm226" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.276529 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.276644 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.291208 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-94bccb6f6-kk87d"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.322521 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.354493 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.365699 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-scripts\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.365744 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-internal-tls-certs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.365768 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-combined-ca-bundle\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.365791 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bce072-cb50-4167-92cc-eab9c8501d2d-logs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.365823 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-config-data\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.365867 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-public-tls-certs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.365889 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vjbz\" (UniqueName: \"kubernetes.io/projected/d4bce072-cb50-4167-92cc-eab9c8501d2d-kube-api-access-4vjbz\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.384097 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.424414 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.445808 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.447561 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.450830 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-bch99" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.451108 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.451424 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.451669 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.464922 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.467303 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-scripts\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.467356 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-internal-tls-certs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.467380 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-combined-ca-bundle\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.467403 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/d4bce072-cb50-4167-92cc-eab9c8501d2d-logs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.467437 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-config-data\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.467478 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-public-tls-certs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.467506 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vjbz\" (UniqueName: \"kubernetes.io/projected/d4bce072-cb50-4167-92cc-eab9c8501d2d-kube-api-access-4vjbz\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.469174 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bce072-cb50-4167-92cc-eab9c8501d2d-logs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.472218 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-scripts\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.480206 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-internal-tls-certs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.482703 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-combined-ca-bundle\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.488936 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.490275 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.491292 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-config-data\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.495180 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.497269 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-public-tls-certs\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.500735 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.512959 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.548461 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vjbz\" (UniqueName: \"kubernetes.io/projected/d4bce072-cb50-4167-92cc-eab9c8501d2d-kube-api-access-4vjbz\") pod \"placement-94bccb6f6-kk87d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571044 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571088 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-logs\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571108 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571136 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z57rj\" (UniqueName: \"kubernetes.io/projected/04b6b07b-9136-4036-a8a1-f048b6b41b44-kube-api-access-z57rj\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571158 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571176 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-logs\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571195 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571231 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571263 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571281 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571311 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571328 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571354 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571370 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-config-data\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571388 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8m69\" (UniqueName: \"kubernetes.io/projected/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-kube-api-access-f8m69\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.571412 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-scripts\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.609259 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673313 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673658 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-logs\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673681 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673708 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z57rj\" (UniqueName: \"kubernetes.io/projected/04b6b07b-9136-4036-a8a1-f048b6b41b44-kube-api-access-z57rj\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673730 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673748 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-logs\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " 
pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673768 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673799 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673835 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673886 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673917 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673939 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673967 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.673986 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-config-data\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.674004 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8m69\" (UniqueName: \"kubernetes.io/projected/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-kube-api-access-f8m69\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 
13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.674035 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-scripts\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.674181 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-logs\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.675054 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.675320 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.675917 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-logs\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.676001 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.676394 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.693429 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-config-data\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.698359 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-scripts\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.698474 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.698809 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8m69\" (UniqueName: \"kubernetes.io/projected/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-kube-api-access-f8m69\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.699694 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.699839 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-config-data\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.700276 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.703280 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-scripts\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.715599 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.722336 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z57rj\" (UniqueName: \"kubernetes.io/projected/04b6b07b-9136-4036-a8a1-f048b6b41b44-kube-api-access-z57rj\") pod \"glance-default-external-api-0\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.725658 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.753218 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: 
\"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.771398 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:43:31 crc kubenswrapper[4690]: I0320 13:43:31.897450 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:32 crc kubenswrapper[4690]: I0320 13:43:32.161982 4690 generic.go:334] "Generic (PLEG): container finished" podID="eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" containerID="6111af7d9d2d98e5bb885344274a5405f8b1848e3df4d659b5d17f660ced3ac9" exitCode=0 Mar 20 13:43:32 crc kubenswrapper[4690]: I0320 13:43:32.162066 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-42k9z" event={"ID":"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007","Type":"ContainerDied","Data":"6111af7d9d2d98e5bb885344274a5405f8b1848e3df4d659b5d17f660ced3ac9"} Mar 20 13:43:32 crc kubenswrapper[4690]: I0320 13:43:32.428042 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c70c11e8-bf26-41f0-9ca7-d135428c216e" path="/var/lib/kubelet/pods/c70c11e8-bf26-41f0-9ca7-d135428c216e/volumes" Mar 20 13:43:32 crc kubenswrapper[4690]: I0320 13:43:32.428661 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e43c11ca-5233-4250-b0cb-8b814c19f794" path="/var/lib/kubelet/pods/e43c11ca-5233-4250-b0cb-8b814c19f794/volumes" Mar 20 13:43:35 crc kubenswrapper[4690]: I0320 13:43:35.946009 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.003556 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-tvmbw"] Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.003806 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" podUID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" containerName="dnsmasq-dns" containerID="cri-o://43c5808746fbf3e1dfadb875df49c35f9ef5900dda0c10297072f87eaf4a556f" gracePeriod=10 Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.206534 4690 generic.go:334] "Generic (PLEG): container finished" podID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" containerID="43c5808746fbf3e1dfadb875df49c35f9ef5900dda0c10297072f87eaf4a556f" exitCode=0 Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.206572 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" event={"ID":"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9","Type":"ContainerDied","Data":"43c5808746fbf3e1dfadb875df49c35f9ef5900dda0c10297072f87eaf4a556f"} Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.791109 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.800088 4690 scope.go:117] "RemoveContainer" containerID="985ca9be3b374da921ccadaf27a55bca782edbe7d5e28d648f2032cc1faadab2" Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.805966 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.814169 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.814210 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.904085 4690 scope.go:117] "RemoveContainer" containerID="6994593b0fe37bac76e1ada026777d3e58ac8c0b7a2ab2baa1b2069a8ad46fdc" Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.946062 4690 scope.go:117] "RemoveContainer" containerID="41e98532edb7d23551a797b20ca95a8585c0d31423aa05dcb3b3f34216d9a931" Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987372 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-combined-ca-bundle\") pod \"6629e615-4e98-4e99-b7dc-6990b379d93c\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987615 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-credential-keys\") pod \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987690 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-db-sync-config-data\") pod \"6629e615-4e98-4e99-b7dc-6990b379d93c\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987735 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-scripts\") pod \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987767 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-combined-ca-bundle\") pod \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987804 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-config-data\") pod \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987824 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h76pl\" (UniqueName: \"kubernetes.io/projected/6629e615-4e98-4e99-b7dc-6990b379d93c-kube-api-access-h76pl\") pod \"6629e615-4e98-4e99-b7dc-6990b379d93c\" (UID: \"6629e615-4e98-4e99-b7dc-6990b379d93c\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987856 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-fernet-keys\") pod \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\" 
(UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.987919 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hv9nb\" (UniqueName: \"kubernetes.io/projected/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-kube-api-access-hv9nb\") pod \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\" (UID: \"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007\") " Mar 20 13:43:36 crc kubenswrapper[4690]: I0320 13:43:36.995687 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-scripts" (OuterVolumeSpecName: "scripts") pod "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" (UID: "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.000588 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" (UID: "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.002529 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-kube-api-access-hv9nb" (OuterVolumeSpecName: "kube-api-access-hv9nb") pod "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" (UID: "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007"). InnerVolumeSpecName "kube-api-access-hv9nb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.003104 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6629e615-4e98-4e99-b7dc-6990b379d93c-kube-api-access-h76pl" (OuterVolumeSpecName: "kube-api-access-h76pl") pod "6629e615-4e98-4e99-b7dc-6990b379d93c" (UID: "6629e615-4e98-4e99-b7dc-6990b379d93c"). InnerVolumeSpecName "kube-api-access-h76pl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.014913 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "6629e615-4e98-4e99-b7dc-6990b379d93c" (UID: "6629e615-4e98-4e99-b7dc-6990b379d93c"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.015222 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" (UID: "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.016202 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.016247 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.017676 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-7946cd7f64-rm6mr" podUID="ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.153:8443: connect: connection refused" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.023202 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.048047 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-config-data" (OuterVolumeSpecName: "config-data") pod "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" (UID: "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.059533 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" (UID: "eea9b8e9-b9d0-49ca-ad22-aaf7450c1007"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.063789 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6629e615-4e98-4e99-b7dc-6990b379d93c" (UID: "6629e615-4e98-4e99-b7dc-6990b379d93c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117547 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117584 4690 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-credential-keys\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117595 4690 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/6629e615-4e98-4e99-b7dc-6990b379d93c-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117607 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117618 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117630 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117640 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h76pl\" (UniqueName: \"kubernetes.io/projected/6629e615-4e98-4e99-b7dc-6990b379d93c-kube-api-access-h76pl\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117652 4690 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-fernet-keys\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.117664 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hv9nb\" (UniqueName: \"kubernetes.io/projected/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007-kube-api-access-hv9nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.218469 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-swift-storage-0\") pod \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.218912 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9gxxr\" (UniqueName: \"kubernetes.io/projected/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-kube-api-access-9gxxr\") pod \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.218953 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-config\") pod \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " Mar 20 
13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.218992 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-nb\") pod \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.219048 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-sb\") pod \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.219096 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-svc\") pod \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\" (UID: \"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9\") " Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.235355 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-kube-api-access-9gxxr" (OuterVolumeSpecName: "kube-api-access-9gxxr") pod "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" (UID: "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9"). InnerVolumeSpecName "kube-api-access-9gxxr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.255130 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"93013757-d360-41e7-92a9-211155703015","Type":"ContainerStarted","Data":"62a084de7a7c77b0e84395468193dcf066a1779adca2afb483e12cc4a3932ad8"} Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.258762 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" event={"ID":"7bd2b5a2-624d-44da-a2d0-2354b53d7ef9","Type":"ContainerDied","Data":"f8f277a382d93fdab257b751d8c1a0b99beff26e373c9964b2eee2bffecfde85"} Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.258799 4690 scope.go:117] "RemoveContainer" containerID="43c5808746fbf3e1dfadb875df49c35f9ef5900dda0c10297072f87eaf4a556f" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.258902 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56df8fb6b7-tvmbw" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.266148 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-vgnp6" event={"ID":"6629e615-4e98-4e99-b7dc-6990b379d93c","Type":"ContainerDied","Data":"2c3bab4dbfca96331ae9696e484d8bd83307e59d0d795d4b5256caa0134061fc"} Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.266180 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c3bab4dbfca96331ae9696e484d8bd83307e59d0d795d4b5256caa0134061fc" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.266236 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-vgnp6" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.268610 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-42k9z" event={"ID":"eea9b8e9-b9d0-49ca-ad22-aaf7450c1007","Type":"ContainerDied","Data":"df299107022bc523892ceb69fbfffc3c3205b055c7a10aa57a3f4c6dfb9d1e0b"} Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.268658 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df299107022bc523892ceb69fbfffc3c3205b055c7a10aa57a3f4c6dfb9d1e0b" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.268725 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-42k9z" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.279608 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" (UID: "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.281355 4690 scope.go:117] "RemoveContainer" containerID="04d7117df6606306ba94e201715c2698965696f09fd36e752c65d4ddc2b22c24" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.281639 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" (UID: "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.286795 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-config" (OuterVolumeSpecName: "config") pod "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" (UID: "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.289219 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" (UID: "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.307476 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" (UID: "7bd2b5a2-624d-44da-a2d0-2354b53d7ef9"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.321266 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9gxxr\" (UniqueName: \"kubernetes.io/projected/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-kube-api-access-9gxxr\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.321292 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.321302 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.321313 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.321323 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.321331 4690 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.463297 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.485012 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-94bccb6f6-kk87d"] Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.618778 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-tvmbw"] Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.635361 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56df8fb6b7-tvmbw"] Mar 20 13:43:37 crc kubenswrapper[4690]: I0320 13:43:37.760115 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.039575 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7b6fc496fc-2z4sr"] Mar 20 13:43:38 crc kubenswrapper[4690]: E0320 13:43:38.039920 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6629e615-4e98-4e99-b7dc-6990b379d93c" containerName="barbican-db-sync" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.039935 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="6629e615-4e98-4e99-b7dc-6990b379d93c" containerName="barbican-db-sync" Mar 20 13:43:38 crc kubenswrapper[4690]: E0320 13:43:38.039950 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" containerName="keystone-bootstrap" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.039956 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" containerName="keystone-bootstrap" Mar 20 13:43:38 crc kubenswrapper[4690]: E0320 13:43:38.039985 4690 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" containerName="dnsmasq-dns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.039993 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" containerName="dnsmasq-dns" Mar 20 13:43:38 crc kubenswrapper[4690]: E0320 13:43:38.040003 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" containerName="init" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.040009 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" containerName="init" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.057350 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" containerName="dnsmasq-dns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.057412 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="6629e615-4e98-4e99-b7dc-6990b379d93c" containerName="barbican-db-sync" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.057438 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" containerName="keystone-bootstrap" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.057955 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7b6fc496fc-2z4sr"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.058033 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.066061 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.066520 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.066810 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.068662 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.068805 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.069568 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-cwsn8" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.159965 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-d9f9c5f67-rdpbv"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.161399 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.166219 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.166337 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-zgphg" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.166237 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.189792 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-68cf44d874-dw9jz"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.191210 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.199875 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.239598 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-68cf44d874-dw9jz"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.248812 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-credential-keys\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.248873 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data-custom\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.248902 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-logs\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.248921 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-scripts\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.248938 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.248957 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgrtt\" (UniqueName: 
\"kubernetes.io/projected/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-kube-api-access-kgrtt\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.248982 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249003 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q5c4n\" (UniqueName: \"kubernetes.io/projected/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-kube-api-access-q5c4n\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249027 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-internal-tls-certs\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249047 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-fernet-keys\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249065 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data-custom\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249095 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-logs\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249124 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-combined-ca-bundle\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249152 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-combined-ca-bundle\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc 
kubenswrapper[4690]: I0320 13:43:38.249173 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-config-data\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249187 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-combined-ca-bundle\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249211 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tncn2\" (UniqueName: \"kubernetes.io/projected/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-kube-api-access-tncn2\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.249254 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-public-tls-certs\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.273917 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-d9f9c5f67-rdpbv"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.311601 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c","Type":"ContainerStarted","Data":"c131b8c03ae6bac2296276ccb47ffc69669081b74786d04928f1686974077ebf"} Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.314673 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"04b6b07b-9136-4036-a8a1-f048b6b41b44","Type":"ContainerStarted","Data":"645065f0467b2d067ac9478c8979fd92f7c0745ef05a89212c583dc8945c10e6"} Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.323923 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94bccb6f6-kk87d" event={"ID":"d4bce072-cb50-4167-92cc-eab9c8501d2d","Type":"ContainerStarted","Data":"7bbc8a49430ac1bcb64f9e5094fe0d4e5e4f42ca1d09cadc16710f57055b6798"} Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.323978 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94bccb6f6-kk87d" event={"ID":"d4bce072-cb50-4167-92cc-eab9c8501d2d","Type":"ContainerStarted","Data":"a93071143859a1d7a3264d23abb7d7008733fb3798f964f10419cac7bbd6f05d"} Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.353502 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-xd7ns"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355434 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-credential-keys\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: 
\"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355469 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data-custom\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355500 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-logs\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355516 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-scripts\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355533 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355551 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgrtt\" (UniqueName: \"kubernetes.io/projected/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-kube-api-access-kgrtt\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355576 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355598 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q5c4n\" (UniqueName: \"kubernetes.io/projected/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-kube-api-access-q5c4n\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355615 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-internal-tls-certs\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355631 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-fernet-keys\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: 
\"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355647 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data-custom\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.355674 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-logs\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.356126 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-logs\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.356182 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-combined-ca-bundle\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.356280 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-combined-ca-bundle\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.356334 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-config-data\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.356365 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-combined-ca-bundle\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.356420 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tncn2\" (UniqueName: \"kubernetes.io/projected/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-kube-api-access-tncn2\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.356468 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-public-tls-certs\") pod 
\"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.360247 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-public-tls-certs\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.360695 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.360699 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-credential-keys\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.366614 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-logs\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.371404 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-internal-tls-certs\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.379619 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data-custom\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.379891 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data-custom\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.380600 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-scripts\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.387483 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-combined-ca-bundle\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.397836 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-xd7ns"] Mar 
20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.415021 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-fernet-keys\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.428293 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgrtt\" (UniqueName: \"kubernetes.io/projected/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-kube-api-access-kgrtt\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.430506 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-config-data\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.431601 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data\") pod \"barbican-worker-d9f9c5f67-rdpbv\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.431621 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-combined-ca-bundle\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.432111 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tncn2\" (UniqueName: \"kubernetes.io/projected/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-kube-api-access-tncn2\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.437454 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q5c4n\" (UniqueName: \"kubernetes.io/projected/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-kube-api-access-q5c4n\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.440687 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data\") pod \"barbican-keystone-listener-68cf44d874-dw9jz\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.446792 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa-combined-ca-bundle\") pod \"keystone-7b6fc496fc-2z4sr\" (UID: \"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa\") " pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc 
kubenswrapper[4690]: I0320 13:43:38.450768 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bd2b5a2-624d-44da-a2d0-2354b53d7ef9" path="/var/lib/kubelet/pods/7bd2b5a2-624d-44da-a2d0-2354b53d7ef9/volumes" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.452067 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-6799cfc5db-x4fzm"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.462375 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.469921 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.470195 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.470305 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69nqh\" (UniqueName: \"kubernetes.io/projected/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-kube-api-access-69nqh\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.470430 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.470568 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.470627 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-config\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.521707 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6799cfc5db-x4fzm"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.536939 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.543700 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-854468899-d6c5x"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.545437 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.552503 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.572684 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrszg\" (UniqueName: \"kubernetes.io/projected/73fc017b-172e-4785-850e-2146a070b915-kube-api-access-wrszg\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.572727 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.572834 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.572871 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-combined-ca-bundle\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.572891 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69nqh\" (UniqueName: \"kubernetes.io/projected/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-kube-api-access-69nqh\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.572907 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73fc017b-172e-4785-850e-2146a070b915-logs\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.572939 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.572963 
4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-config-data\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.573037 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-config\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.573076 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-config-data-custom\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.573104 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.579562 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-nb\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.586396 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-config\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.599729 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-swift-storage-0\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.600932 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-sb\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.602512 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-854468899-d6c5x"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.609440 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-svc\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: 
\"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.628012 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69nqh\" (UniqueName: \"kubernetes.io/projected/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-kube-api-access-69nqh\") pod \"dnsmasq-dns-848cf88cfc-xd7ns\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.653652 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-8b4d55c86-ddn7b"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.655409 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.657743 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.674671 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-config-data\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.674796 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-config-data-custom\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.674873 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-config-data-custom\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.674978 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-combined-ca-bundle\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.675006 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrszg\" (UniqueName: \"kubernetes.io/projected/73fc017b-172e-4785-850e-2146a070b915-kube-api-access-wrszg\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.675026 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8vz7t\" (UniqueName: \"kubernetes.io/projected/381e146e-ca50-42cb-9e5b-e4e794c77d28-kube-api-access-8vz7t\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc 
kubenswrapper[4690]: I0320 13:43:38.675137 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/381e146e-ca50-42cb-9e5b-e4e794c77d28-logs\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.675160 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-combined-ca-bundle\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.675178 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73fc017b-172e-4785-850e-2146a070b915-logs\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.675228 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-config-data\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.675852 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-8b4d55c86-ddn7b"] Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.679514 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/73fc017b-172e-4785-850e-2146a070b915-logs\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.681279 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-config-data\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.683218 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-config-data-custom\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.703482 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.712990 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrszg\" (UniqueName: \"kubernetes.io/projected/73fc017b-172e-4785-850e-2146a070b915-kube-api-access-wrszg\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.722471 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/73fc017b-172e-4785-850e-2146a070b915-combined-ca-bundle\") pod \"barbican-keystone-listener-6799cfc5db-x4fzm\" (UID: \"73fc017b-172e-4785-850e-2146a070b915\") " pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.739489 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790360 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3f864c3-ce54-42e4-b324-f488eea1fadc-logs\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790445 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-config-data-custom\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790495 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-combined-ca-bundle\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790524 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790547 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-combined-ca-bundle\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790578 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8vz7t\" (UniqueName: \"kubernetes.io/projected/381e146e-ca50-42cb-9e5b-e4e794c77d28-kube-api-access-8vz7t\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 
13:43:38.790618 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d5ds\" (UniqueName: \"kubernetes.io/projected/e3f864c3-ce54-42e4-b324-f488eea1fadc-kube-api-access-6d5ds\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790689 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data-custom\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790720 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/381e146e-ca50-42cb-9e5b-e4e794c77d28-logs\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.790792 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-config-data\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.792225 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/381e146e-ca50-42cb-9e5b-e4e794c77d28-logs\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.795072 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-config-data-custom\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.803515 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-combined-ca-bundle\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.804541 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/381e146e-ca50-42cb-9e5b-e4e794c77d28-config-data\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.818445 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8vz7t\" (UniqueName: \"kubernetes.io/projected/381e146e-ca50-42cb-9e5b-e4e794c77d28-kube-api-access-8vz7t\") pod \"barbican-worker-854468899-d6c5x\" (UID: \"381e146e-ca50-42cb-9e5b-e4e794c77d28\") " pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.827854 4690 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.883320 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-854468899-d6c5x" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.893312 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data-custom\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.893410 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3f864c3-ce54-42e4-b324-f488eea1fadc-logs\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.893462 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.893479 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-combined-ca-bundle\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.893509 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d5ds\" (UniqueName: \"kubernetes.io/projected/e3f864c3-ce54-42e4-b324-f488eea1fadc-kube-api-access-6d5ds\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.896031 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3f864c3-ce54-42e4-b324-f488eea1fadc-logs\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.900022 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.914809 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d5ds\" (UniqueName: \"kubernetes.io/projected/e3f864c3-ce54-42e4-b324-f488eea1fadc-kube-api-access-6d5ds\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.916821 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" 
(UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data-custom\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:38 crc kubenswrapper[4690]: I0320 13:43:38.919273 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-combined-ca-bundle\") pod \"barbican-api-8b4d55c86-ddn7b\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.171051 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.285541 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-d9f9c5f67-rdpbv"] Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.381113 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" event={"ID":"f369e2b4-2fa6-42fb-b77e-869dcf0ad829","Type":"ContainerStarted","Data":"70258873d05ff8a9d173fd7a961cd1b3ffcb278982c5c58dc6cbf23c0ddfc662"} Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.387026 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c","Type":"ContainerStarted","Data":"c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b"} Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.398651 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94bccb6f6-kk87d" event={"ID":"d4bce072-cb50-4167-92cc-eab9c8501d2d","Type":"ContainerStarted","Data":"d75dc827af97983c938e5fdd672557ac4733979363f2210d2b851349544f20bf"} Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.399630 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.399656 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.436522 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-94bccb6f6-kk87d" podStartSLOduration=8.436499779 podStartE2EDuration="8.436499779s" podCreationTimestamp="2026-03-20 13:43:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:39.425555946 +0000 UTC m=+1265.715155879" watchObservedRunningTime="2026-03-20 13:43:39.436499779 +0000 UTC m=+1265.726099722" Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.456549 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-68cf44d874-dw9jz"] Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.641915 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7b6fc496fc-2z4sr"] Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.786715 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-xd7ns"] Mar 20 13:43:39 crc kubenswrapper[4690]: W0320 13:43:39.882126 4690 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod381e146e_ca50_42cb_9e5b_e4e794c77d28.slice/crio-21ecfe9d5af8a16261ef6163851b512dfed4498ca872e0a1e311c05de82c9125 WatchSource:0}: Error finding container 21ecfe9d5af8a16261ef6163851b512dfed4498ca872e0a1e311c05de82c9125: Status 404 returned error can't find the container with id 21ecfe9d5af8a16261ef6163851b512dfed4498ca872e0a1e311c05de82c9125 Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.886168 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-854468899-d6c5x"] Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.897289 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-8b4d55c86-ddn7b"] Mar 20 13:43:39 crc kubenswrapper[4690]: W0320 13:43:39.904706 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode3f864c3_ce54_42e4_b324_f488eea1fadc.slice/crio-d36013c6617c880fa8b937d7ffe8b64a634c7d21e5d87e0074c47ac15bc411e7 WatchSource:0}: Error finding container d36013c6617c880fa8b937d7ffe8b64a634c7d21e5d87e0074c47ac15bc411e7: Status 404 returned error can't find the container with id d36013c6617c880fa8b937d7ffe8b64a634c7d21e5d87e0074c47ac15bc411e7 Mar 20 13:43:39 crc kubenswrapper[4690]: I0320 13:43:39.905166 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-6799cfc5db-x4fzm"] Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.436770 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"04b6b07b-9136-4036-a8a1-f048b6b41b44","Type":"ContainerStarted","Data":"7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.448629 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" event={"ID":"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec","Type":"ContainerStarted","Data":"04942e888f50463d4f86c3771f38e5d2ce16420611509c47eef8c4dcaa4efbaf"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.457891 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8b4d55c86-ddn7b" event={"ID":"e3f864c3-ce54-42e4-b324-f488eea1fadc","Type":"ContainerStarted","Data":"78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.457951 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8b4d55c86-ddn7b" event={"ID":"e3f864c3-ce54-42e4-b324-f488eea1fadc","Type":"ContainerStarted","Data":"d36013c6617c880fa8b937d7ffe8b64a634c7d21e5d87e0074c47ac15bc411e7"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.461262 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-854468899-d6c5x" event={"ID":"381e146e-ca50-42cb-9e5b-e4e794c77d28","Type":"ContainerStarted","Data":"21ecfe9d5af8a16261ef6163851b512dfed4498ca872e0a1e311c05de82c9125"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.472353 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" event={"ID":"73fc017b-172e-4785-850e-2146a070b915","Type":"ContainerStarted","Data":"51ed95edb95e7256530462546d4e3cadcc649b79dce98c8e50200096212de10a"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.476242 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c","Type":"ContainerStarted","Data":"ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.495716 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" event={"ID":"b5b2037e-3b1c-491f-9f12-d8e907ed85fc","Type":"ContainerStarted","Data":"cfe1ff26fa73dff508593f33073564e46a0263b82fd88d492172febe57fcb9b5"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.511945 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7b6fc496fc-2z4sr" event={"ID":"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa","Type":"ContainerStarted","Data":"fddba558fd67b369319814682f56ec1b9a54d72040c4d53d4fc2bc715090c3e8"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.511979 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7b6fc496fc-2z4sr" event={"ID":"8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa","Type":"ContainerStarted","Data":"245a4c65a8e9e5e2f9c55c1491344ef16b4bf83b14c7223351b0bcdf3813b50a"} Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.512004 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.531257 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.531235723 podStartE2EDuration="9.531235723s" podCreationTimestamp="2026-03-20 13:43:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:40.507726192 +0000 UTC m=+1266.797326135" watchObservedRunningTime="2026-03-20 13:43:40.531235723 +0000 UTC m=+1266.820835676" Mar 20 13:43:40 crc kubenswrapper[4690]: I0320 13:43:40.538642 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7b6fc496fc-2z4sr" podStartSLOduration=2.538625293 podStartE2EDuration="2.538625293s" podCreationTimestamp="2026-03-20 13:43:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:40.52938916 +0000 UTC m=+1266.818989103" watchObservedRunningTime="2026-03-20 13:43:40.538625293 +0000 UTC m=+1266.828225226" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.531601 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-6rgrr" event={"ID":"3def27d2-bdda-4c07-b4b2-f695994bd509","Type":"ContainerStarted","Data":"a2ec647149e8258ed55ef0a1b018da90f2cf1939843a8b2d91b52e6e2d164078"} Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.537820 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"04b6b07b-9136-4036-a8a1-f048b6b41b44","Type":"ContainerStarted","Data":"592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5"} Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.539249 4690 generic.go:334] "Generic (PLEG): container finished" podID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" containerID="0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af" exitCode=0 Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.539301 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" 
event={"ID":"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec","Type":"ContainerDied","Data":"0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af"} Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.542089 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8b4d55c86-ddn7b" event={"ID":"e3f864c3-ce54-42e4-b324-f488eea1fadc","Type":"ContainerStarted","Data":"ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba"} Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.542127 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.543056 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.553670 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-6rgrr" podStartSLOduration=4.291854024 podStartE2EDuration="44.553654845s" podCreationTimestamp="2026-03-20 13:42:57 +0000 UTC" firstStartedPulling="2026-03-20 13:42:59.061213051 +0000 UTC m=+1225.350812994" lastFinishedPulling="2026-03-20 13:43:39.323013872 +0000 UTC m=+1265.612613815" observedRunningTime="2026-03-20 13:43:41.54857537 +0000 UTC m=+1267.838175313" watchObservedRunningTime="2026-03-20 13:43:41.553654845 +0000 UTC m=+1267.843254788" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.587601 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=10.587585333 podStartE2EDuration="10.587585333s" podCreationTimestamp="2026-03-20 13:43:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:41.584284359 +0000 UTC m=+1267.873884302" watchObservedRunningTime="2026-03-20 13:43:41.587585333 +0000 UTC m=+1267.877185276" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.619683 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-8b4d55c86-ddn7b" podStartSLOduration=3.619664428 podStartE2EDuration="3.619664428s" podCreationTimestamp="2026-03-20 13:43:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:41.612878314 +0000 UTC m=+1267.902478257" watchObservedRunningTime="2026-03-20 13:43:41.619664428 +0000 UTC m=+1267.909264371" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.661786 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-74474d96d6-48nxq"] Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.665744 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.668374 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.674216 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.688738 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-74474d96d6-48nxq"] Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.769270 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-public-tls-certs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.769321 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-config-data-custom\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.769400 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-combined-ca-bundle\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.769457 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-internal-tls-certs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.769475 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-config-data\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.769605 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jjsbc\" (UniqueName: \"kubernetes.io/projected/1e178592-3eb7-4d02-8a14-08d18a96e289-kube-api-access-jjsbc\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.769625 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e178592-3eb7-4d02-8a14-08d18a96e289-logs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.772630 4690 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.772667 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.824987 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.826111 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.871080 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jjsbc\" (UniqueName: \"kubernetes.io/projected/1e178592-3eb7-4d02-8a14-08d18a96e289-kube-api-access-jjsbc\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.871130 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e178592-3eb7-4d02-8a14-08d18a96e289-logs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.871221 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-public-tls-certs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.871246 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-config-data-custom\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.871286 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-combined-ca-bundle\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.871682 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1e178592-3eb7-4d02-8a14-08d18a96e289-logs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.872125 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-internal-tls-certs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.872154 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-config-data\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.879962 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-combined-ca-bundle\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.880725 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-public-tls-certs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.881926 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-config-data-custom\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.882033 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-config-data\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.883335 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/1e178592-3eb7-4d02-8a14-08d18a96e289-internal-tls-certs\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.897710 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.897817 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.898504 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jjsbc\" (UniqueName: \"kubernetes.io/projected/1e178592-3eb7-4d02-8a14-08d18a96e289-kube-api-access-jjsbc\") pod \"barbican-api-74474d96d6-48nxq\" (UID: \"1e178592-3eb7-4d02-8a14-08d18a96e289\") " pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.965771 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.979463 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:41 crc kubenswrapper[4690]: I0320 13:43:41.993905 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:42 crc kubenswrapper[4690]: I0320 13:43:42.560637 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Mar 20 13:43:42 crc kubenswrapper[4690]: I0320 13:43:42.561137 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Mar 20 13:43:42 crc kubenswrapper[4690]: I0320 13:43:42.561151 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:42 crc kubenswrapper[4690]: I0320 13:43:42.561160 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.000280 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-74474d96d6-48nxq"] Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.584580 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-854468899-d6c5x" event={"ID":"381e146e-ca50-42cb-9e5b-e4e794c77d28","Type":"ContainerStarted","Data":"4dcaa2a343703f9b8169db4d707880b3bf7f8636a339eaec615fc8de680372d4"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.585096 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-854468899-d6c5x" event={"ID":"381e146e-ca50-42cb-9e5b-e4e794c77d28","Type":"ContainerStarted","Data":"b6e9ce5ce4ace6930238695d987f5e3ffe171efaa749d60799e4215e92cf2043"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.600175 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" event={"ID":"73fc017b-172e-4785-850e-2146a070b915","Type":"ContainerStarted","Data":"6ab9e6cb1e8dffd1cbb37655982eec4799b4d4ff31762cc3bfd90516bf71def2"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.600220 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" event={"ID":"73fc017b-172e-4785-850e-2146a070b915","Type":"ContainerStarted","Data":"f0c81c9ed01b651802e1e537319aef3786c010dc278c37c139f41af6081b0736"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.608275 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74474d96d6-48nxq" event={"ID":"1e178592-3eb7-4d02-8a14-08d18a96e289","Type":"ContainerStarted","Data":"c862ee825c9b2ce593f9567144f8b77f8bf13a1662a8f56a82c7f1dd9eb17082"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.608322 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74474d96d6-48nxq" event={"ID":"1e178592-3eb7-4d02-8a14-08d18a96e289","Type":"ContainerStarted","Data":"379c42804fc91e8e743006ec89e3b2892cddc5aac7b5afbff1270f4c2cf78877"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.612381 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-854468899-d6c5x" podStartSLOduration=2.979969503 podStartE2EDuration="5.612366185s" podCreationTimestamp="2026-03-20 13:43:38 +0000 UTC" firstStartedPulling="2026-03-20 13:43:39.888140251 +0000 UTC m=+1266.177740194" lastFinishedPulling="2026-03-20 13:43:42.520536943 +0000 UTC m=+1268.810136876" observedRunningTime="2026-03-20 13:43:43.611412538 +0000 UTC m=+1269.901012481" watchObservedRunningTime="2026-03-20 13:43:43.612366185 +0000 UTC m=+1269.901966128" Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 
13:43:43.620023 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" event={"ID":"b5b2037e-3b1c-491f-9f12-d8e907ed85fc","Type":"ContainerStarted","Data":"faf18290be020f3e34b6964a854b426d9f340df5530794afc752b6d49a1dbf49"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.620057 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" event={"ID":"b5b2037e-3b1c-491f-9f12-d8e907ed85fc","Type":"ContainerStarted","Data":"f3c5d1a0bf01700e5502b96f2ff7a48e6fea76c8a18260189115cee4258010fd"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.628677 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-6799cfc5db-x4fzm" podStartSLOduration=3.027355514 podStartE2EDuration="5.62866685s" podCreationTimestamp="2026-03-20 13:43:38 +0000 UTC" firstStartedPulling="2026-03-20 13:43:39.918053624 +0000 UTC m=+1266.207653567" lastFinishedPulling="2026-03-20 13:43:42.51936496 +0000 UTC m=+1268.808964903" observedRunningTime="2026-03-20 13:43:43.626878659 +0000 UTC m=+1269.916478612" watchObservedRunningTime="2026-03-20 13:43:43.62866685 +0000 UTC m=+1269.918266793" Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.663012 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-d9f9c5f67-rdpbv"] Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.665548 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" event={"ID":"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec","Type":"ContainerStarted","Data":"a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.665868 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.679596 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" event={"ID":"f369e2b4-2fa6-42fb-b77e-869dcf0ad829","Type":"ContainerStarted","Data":"8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.679632 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" event={"ID":"f369e2b4-2fa6-42fb-b77e-869dcf0ad829","Type":"ContainerStarted","Data":"87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32"} Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.688656 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-68cf44d874-dw9jz"] Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.717301 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" podStartSLOduration=2.693233834 podStartE2EDuration="5.717281838s" podCreationTimestamp="2026-03-20 13:43:38 +0000 UTC" firstStartedPulling="2026-03-20 13:43:39.497451757 +0000 UTC m=+1265.787051700" lastFinishedPulling="2026-03-20 13:43:42.521499761 +0000 UTC m=+1268.811099704" observedRunningTime="2026-03-20 13:43:43.665336916 +0000 UTC m=+1269.954936849" watchObservedRunningTime="2026-03-20 13:43:43.717281838 +0000 UTC m=+1270.006881781" Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.754115 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" 
podStartSLOduration=5.754097278 podStartE2EDuration="5.754097278s" podCreationTimestamp="2026-03-20 13:43:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:43.687411116 +0000 UTC m=+1269.977011059" watchObservedRunningTime="2026-03-20 13:43:43.754097278 +0000 UTC m=+1270.043697221" Mar 20 13:43:43 crc kubenswrapper[4690]: I0320 13:43:43.760244 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" podStartSLOduration=2.60052742 podStartE2EDuration="5.760228103s" podCreationTimestamp="2026-03-20 13:43:38 +0000 UTC" firstStartedPulling="2026-03-20 13:43:39.357275429 +0000 UTC m=+1265.646875372" lastFinishedPulling="2026-03-20 13:43:42.516976112 +0000 UTC m=+1268.806576055" observedRunningTime="2026-03-20 13:43:43.704427651 +0000 UTC m=+1269.994027594" watchObservedRunningTime="2026-03-20 13:43:43.760228103 +0000 UTC m=+1270.049828046" Mar 20 13:43:44 crc kubenswrapper[4690]: I0320 13:43:44.698179 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-74474d96d6-48nxq" event={"ID":"1e178592-3eb7-4d02-8a14-08d18a96e289","Type":"ContainerStarted","Data":"ef24af9934dcc38b19e8408ceff30d159638939ef0a0c70bded29464b071a592"} Mar 20 13:43:44 crc kubenswrapper[4690]: I0320 13:43:44.698746 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:44 crc kubenswrapper[4690]: I0320 13:43:44.699150 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerName="barbican-worker-log" containerID="cri-o://87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32" gracePeriod=30 Mar 20 13:43:44 crc kubenswrapper[4690]: I0320 13:43:44.699176 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerName="barbican-worker" containerID="cri-o://8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836" gracePeriod=30 Mar 20 13:43:44 crc kubenswrapper[4690]: I0320 13:43:44.721588 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-74474d96d6-48nxq" podStartSLOduration=3.7215654320000002 podStartE2EDuration="3.721565432s" podCreationTimestamp="2026-03-20 13:43:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:44.720047039 +0000 UTC m=+1271.009646982" watchObservedRunningTime="2026-03-20 13:43:44.721565432 +0000 UTC m=+1271.011165375" Mar 20 13:43:45 crc kubenswrapper[4690]: I0320 13:43:45.708590 4690 generic.go:334] "Generic (PLEG): container finished" podID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerID="87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32" exitCode=143 Mar 20 13:43:45 crc kubenswrapper[4690]: I0320 13:43:45.708677 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" event={"ID":"f369e2b4-2fa6-42fb-b77e-869dcf0ad829","Type":"ContainerDied","Data":"87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32"} Mar 20 13:43:45 crc kubenswrapper[4690]: I0320 13:43:45.709188 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:45 crc kubenswrapper[4690]: I0320 13:43:45.709415 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerName="barbican-keystone-listener" containerID="cri-o://faf18290be020f3e34b6964a854b426d9f340df5530794afc752b6d49a1dbf49" gracePeriod=30 Mar 20 13:43:45 crc kubenswrapper[4690]: I0320 13:43:45.709353 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerName="barbican-keystone-listener-log" containerID="cri-o://f3c5d1a0bf01700e5502b96f2ff7a48e6fea76c8a18260189115cee4258010fd" gracePeriod=30 Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.041544 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.718215 4690 generic.go:334] "Generic (PLEG): container finished" podID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerID="faf18290be020f3e34b6964a854b426d9f340df5530794afc752b6d49a1dbf49" exitCode=0 Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.718256 4690 generic.go:334] "Generic (PLEG): container finished" podID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerID="f3c5d1a0bf01700e5502b96f2ff7a48e6fea76c8a18260189115cee4258010fd" exitCode=143 Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.718284 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" event={"ID":"b5b2037e-3b1c-491f-9f12-d8e907ed85fc","Type":"ContainerDied","Data":"faf18290be020f3e34b6964a854b426d9f340df5530794afc752b6d49a1dbf49"} Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.718324 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" event={"ID":"b5b2037e-3b1c-491f-9f12-d8e907ed85fc","Type":"ContainerDied","Data":"f3c5d1a0bf01700e5502b96f2ff7a48e6fea76c8a18260189115cee4258010fd"} Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.720022 4690 generic.go:334] "Generic (PLEG): container finished" podID="3def27d2-bdda-4c07-b4b2-f695994bd509" containerID="a2ec647149e8258ed55ef0a1b018da90f2cf1939843a8b2d91b52e6e2d164078" exitCode=0 Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.720765 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-6rgrr" event={"ID":"3def27d2-bdda-4c07-b4b2-f695994bd509","Type":"ContainerDied","Data":"a2ec647149e8258ed55ef0a1b018da90f2cf1939843a8b2d91b52e6e2d164078"} Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.817262 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-587c585984-xs7nl" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Mar 20 13:43:46 crc kubenswrapper[4690]: I0320 13:43:46.946144 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/barbican-api-8b4d55c86-ddn7b" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api-log" probeResult="failure" output="HTTP probe failed with statuscode: 500" Mar 20 13:43:47 crc kubenswrapper[4690]: I0320 13:43:47.016600 4690 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/horizon-7946cd7f64-rm6mr" podUID="ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.153:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.153:8443: connect: connection refused" Mar 20 13:43:47 crc kubenswrapper[4690]: I0320 13:43:47.148129 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Mar 20 13:43:47 crc kubenswrapper[4690]: I0320 13:43:47.822492 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Mar 20 13:43:48 crc kubenswrapper[4690]: I0320 13:43:48.743195 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:43:48 crc kubenswrapper[4690]: I0320 13:43:48.848377 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-h7ptl"] Mar 20 13:43:48 crc kubenswrapper[4690]: I0320 13:43:48.848636 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" podUID="5e266b86-47ff-435f-b619-baa374a78476" containerName="dnsmasq-dns" containerID="cri-o://ea454ed32cf5432c0ce784a14841615e0b4764b96a40182ae7f47a05e4b4447a" gracePeriod=10 Mar 20 13:43:48 crc kubenswrapper[4690]: I0320 13:43:48.990774 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.069327 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.132018 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-config-data\") pod \"3def27d2-bdda-4c07-b4b2-f695994bd509\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.132075 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-combined-ca-bundle\") pod \"3def27d2-bdda-4c07-b4b2-f695994bd509\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.132108 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-db-sync-config-data\") pod \"3def27d2-bdda-4c07-b4b2-f695994bd509\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.132184 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mdcnd\" (UniqueName: \"kubernetes.io/projected/3def27d2-bdda-4c07-b4b2-f695994bd509-kube-api-access-mdcnd\") pod \"3def27d2-bdda-4c07-b4b2-f695994bd509\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.132271 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3def27d2-bdda-4c07-b4b2-f695994bd509-etc-machine-id\") pod \"3def27d2-bdda-4c07-b4b2-f695994bd509\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " Mar 20 13:43:49 crc 
kubenswrapper[4690]: I0320 13:43:49.132295 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-scripts\") pod \"3def27d2-bdda-4c07-b4b2-f695994bd509\" (UID: \"3def27d2-bdda-4c07-b4b2-f695994bd509\") " Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.133423 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3def27d2-bdda-4c07-b4b2-f695994bd509-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3def27d2-bdda-4c07-b4b2-f695994bd509" (UID: "3def27d2-bdda-4c07-b4b2-f695994bd509"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.141976 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3def27d2-bdda-4c07-b4b2-f695994bd509-kube-api-access-mdcnd" (OuterVolumeSpecName: "kube-api-access-mdcnd") pod "3def27d2-bdda-4c07-b4b2-f695994bd509" (UID: "3def27d2-bdda-4c07-b4b2-f695994bd509"). InnerVolumeSpecName "kube-api-access-mdcnd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.145162 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "3def27d2-bdda-4c07-b4b2-f695994bd509" (UID: "3def27d2-bdda-4c07-b4b2-f695994bd509"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.149668 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-scripts" (OuterVolumeSpecName: "scripts") pod "3def27d2-bdda-4c07-b4b2-f695994bd509" (UID: "3def27d2-bdda-4c07-b4b2-f695994bd509"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.192278 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "3def27d2-bdda-4c07-b4b2-f695994bd509" (UID: "3def27d2-bdda-4c07-b4b2-f695994bd509"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.196004 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-config-data" (OuterVolumeSpecName: "config-data") pod "3def27d2-bdda-4c07-b4b2-f695994bd509" (UID: "3def27d2-bdda-4c07-b4b2-f695994bd509"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.235407 4690 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3def27d2-bdda-4c07-b4b2-f695994bd509-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.235488 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.235500 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.235511 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.235522 4690 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/3def27d2-bdda-4c07-b4b2-f695994bd509-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.235533 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mdcnd\" (UniqueName: \"kubernetes.io/projected/3def27d2-bdda-4c07-b4b2-f695994bd509-kube-api-access-mdcnd\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.749534 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-6rgrr" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.749539 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-6rgrr" event={"ID":"3def27d2-bdda-4c07-b4b2-f695994bd509","Type":"ContainerDied","Data":"48aaaa77378b8c665204b752ab8dae8e229a5b47d26cbc496c4d6280d5f8650a"} Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.749963 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48aaaa77378b8c665204b752ab8dae8e229a5b47d26cbc496c4d6280d5f8650a" Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.753236 4690 generic.go:334] "Generic (PLEG): container finished" podID="5e266b86-47ff-435f-b619-baa374a78476" containerID="ea454ed32cf5432c0ce784a14841615e0b4764b96a40182ae7f47a05e4b4447a" exitCode=0 Mar 20 13:43:49 crc kubenswrapper[4690]: I0320 13:43:49.753284 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" event={"ID":"5e266b86-47ff-435f-b619-baa374a78476","Type":"ContainerDied","Data":"ea454ed32cf5432c0ce784a14841615e0b4764b96a40182ae7f47a05e4b4447a"} Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.215752 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:43:50 crc kubenswrapper[4690]: E0320 13:43:50.220308 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3def27d2-bdda-4c07-b4b2-f695994bd509" containerName="cinder-db-sync" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.220428 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="3def27d2-bdda-4c07-b4b2-f695994bd509" containerName="cinder-db-sync" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.220708 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="3def27d2-bdda-4c07-b4b2-f695994bd509" containerName="cinder-db-sync" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.223508 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.228301 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.228500 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-4pfnp" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.228685 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.229053 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.244900 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.261568 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.261612 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh8mt\" (UniqueName: \"kubernetes.io/projected/fb3cd5da-dd91-4226-9f58-85a1bb729397-kube-api-access-wh8mt\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.261661 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.261702 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.261732 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb3cd5da-dd91-4226-9f58-85a1bb729397-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.261754 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-scripts\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.291176 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-n9p8r"] Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.292569 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.318466 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-n9p8r"] Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.362786 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.362842 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb3cd5da-dd91-4226-9f58-85a1bb729397-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.362882 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-scripts\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.362945 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.362966 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh8mt\" (UniqueName: \"kubernetes.io/projected/fb3cd5da-dd91-4226-9f58-85a1bb729397-kube-api-access-wh8mt\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.363011 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.363214 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb3cd5da-dd91-4226-9f58-85a1bb729397-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.369578 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.369995 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-scripts\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 
13:43:50.372235 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.374429 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.387245 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh8mt\" (UniqueName: \"kubernetes.io/projected/fb3cd5da-dd91-4226-9f58-85a1bb729397-kube-api-access-wh8mt\") pod \"cinder-scheduler-0\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.464992 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.465046 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.465084 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.465168 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-config\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.465191 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-svc\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.465223 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44g7f\" (UniqueName: \"kubernetes.io/projected/35385d42-f164-4605-8a55-290d5acc5192-kube-api-access-44g7f\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 
13:43:50.567112 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-config\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.567161 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-svc\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.567199 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44g7f\" (UniqueName: \"kubernetes.io/projected/35385d42-f164-4605-8a55-290d5acc5192-kube-api-access-44g7f\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.567336 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.567355 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.567404 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.570596 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-sb\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.571271 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.572215 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-swift-storage-0\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.572363 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-svc\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.573158 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-config\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.573776 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-nb\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.628226 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.635431 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.642143 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44g7f\" (UniqueName: \"kubernetes.io/projected/35385d42-f164-4605-8a55-290d5acc5192-kube-api-access-44g7f\") pod \"dnsmasq-dns-6578955fd5-n9p8r\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.642352 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.646995 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.741096 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.770876 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" event={"ID":"b5b2037e-3b1c-491f-9f12-d8e907ed85fc","Type":"ContainerDied","Data":"cfe1ff26fa73dff508593f33073564e46a0263b82fd88d492172febe57fcb9b5"} Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.770928 4690 scope.go:117] "RemoveContainer" containerID="faf18290be020f3e34b6964a854b426d9f340df5530794afc752b6d49a1dbf49" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.771100 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-keystone-listener-68cf44d874-dw9jz" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.772441 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748e219a-0f6d-4ab3-aa46-e62b6310ca30-logs\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.772488 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-scripts\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.772520 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qd4j8\" (UniqueName: \"kubernetes.io/projected/748e219a-0f6d-4ab3-aa46-e62b6310ca30-kube-api-access-qd4j8\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.772552 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748e219a-0f6d-4ab3-aa46-e62b6310ca30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.772567 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data-custom\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.772625 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.772676 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.874385 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data-custom\") pod \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.874488 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-combined-ca-bundle\") pod \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.874547 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-logs\") pod \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.874603 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tncn2\" (UniqueName: \"kubernetes.io/projected/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-kube-api-access-tncn2\") pod \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.874685 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data\") pod \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\" (UID: \"b5b2037e-3b1c-491f-9f12-d8e907ed85fc\") " Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.874950 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.874984 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748e219a-0f6d-4ab3-aa46-e62b6310ca30-logs\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.875011 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-scripts\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.875041 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qd4j8\" (UniqueName: \"kubernetes.io/projected/748e219a-0f6d-4ab3-aa46-e62b6310ca30-kube-api-access-qd4j8\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.875078 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748e219a-0f6d-4ab3-aa46-e62b6310ca30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.875095 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data-custom\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.875161 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.883551 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/748e219a-0f6d-4ab3-aa46-e62b6310ca30-etc-machine-id\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.892325 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-scripts\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.892671 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.893876 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data-custom\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.899116 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-logs" (OuterVolumeSpecName: "logs") pod "b5b2037e-3b1c-491f-9f12-d8e907ed85fc" (UID: "b5b2037e-3b1c-491f-9f12-d8e907ed85fc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.900548 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748e219a-0f6d-4ab3-aa46-e62b6310ca30-logs\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.918218 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qd4j8\" (UniqueName: \"kubernetes.io/projected/748e219a-0f6d-4ab3-aa46-e62b6310ca30-kube-api-access-qd4j8\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.919078 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data\") pod \"cinder-api-0\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " pod="openstack/cinder-api-0" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.929155 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b5b2037e-3b1c-491f-9f12-d8e907ed85fc" (UID: "b5b2037e-3b1c-491f-9f12-d8e907ed85fc"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.929170 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-kube-api-access-tncn2" (OuterVolumeSpecName: "kube-api-access-tncn2") pod "b5b2037e-3b1c-491f-9f12-d8e907ed85fc" (UID: "b5b2037e-3b1c-491f-9f12-d8e907ed85fc"). InnerVolumeSpecName "kube-api-access-tncn2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.929563 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.964044 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b5b2037e-3b1c-491f-9f12-d8e907ed85fc" (UID: "b5b2037e-3b1c-491f-9f12-d8e907ed85fc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.976754 4690 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.976971 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.977064 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.977147 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tncn2\" (UniqueName: \"kubernetes.io/projected/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-kube-api-access-tncn2\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:50 crc kubenswrapper[4690]: I0320 13:43:50.988712 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data" (OuterVolumeSpecName: "config-data") pod "b5b2037e-3b1c-491f-9f12-d8e907ed85fc" (UID: "b5b2037e-3b1c-491f-9f12-d8e907ed85fc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.036676 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.078950 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b5b2037e-3b1c-491f-9f12-d8e907ed85fc-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.140188 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-keystone-listener-68cf44d874-dw9jz"] Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.146654 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-keystone-listener-68cf44d874-dw9jz"] Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.180277 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.237468 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.622333 4690 scope.go:117] "RemoveContainer" containerID="f3c5d1a0bf01700e5502b96f2ff7a48e6fea76c8a18260189115cee4258010fd" Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.796674 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" event={"ID":"5e266b86-47ff-435f-b619-baa374a78476","Type":"ContainerDied","Data":"e93664ffafe947d63a827b9e2cf5a737bd6d03b2d483f839fd47e310e2618aa7"} Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.796730 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e93664ffafe947d63a827b9e2cf5a737bd6d03b2d483f839fd47e310e2618aa7" Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.885282 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.996915 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-sb\") pod \"5e266b86-47ff-435f-b619-baa374a78476\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.996986 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-svc\") pod \"5e266b86-47ff-435f-b619-baa374a78476\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.997042 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-config\") pod \"5e266b86-47ff-435f-b619-baa374a78476\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.997392 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qkzdp\" (UniqueName: \"kubernetes.io/projected/5e266b86-47ff-435f-b619-baa374a78476-kube-api-access-qkzdp\") pod \"5e266b86-47ff-435f-b619-baa374a78476\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.997424 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-nb\") pod \"5e266b86-47ff-435f-b619-baa374a78476\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " Mar 20 13:43:51 crc kubenswrapper[4690]: I0320 13:43:51.997465 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-swift-storage-0\") pod \"5e266b86-47ff-435f-b619-baa374a78476\" (UID: \"5e266b86-47ff-435f-b619-baa374a78476\") " Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.029061 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e266b86-47ff-435f-b619-baa374a78476-kube-api-access-qkzdp" (OuterVolumeSpecName: "kube-api-access-qkzdp") pod "5e266b86-47ff-435f-b619-baa374a78476" (UID: "5e266b86-47ff-435f-b619-baa374a78476"). InnerVolumeSpecName "kube-api-access-qkzdp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.078296 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5e266b86-47ff-435f-b619-baa374a78476" (UID: "5e266b86-47ff-435f-b619-baa374a78476"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.100086 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qkzdp\" (UniqueName: \"kubernetes.io/projected/5e266b86-47ff-435f-b619-baa374a78476-kube-api-access-qkzdp\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.100114 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.117242 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5e266b86-47ff-435f-b619-baa374a78476" (UID: "5e266b86-47ff-435f-b619-baa374a78476"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.118562 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5e266b86-47ff-435f-b619-baa374a78476" (UID: "5e266b86-47ff-435f-b619-baa374a78476"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.131360 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-config" (OuterVolumeSpecName: "config") pod "5e266b86-47ff-435f-b619-baa374a78476" (UID: "5e266b86-47ff-435f-b619-baa374a78476"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.160345 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5e266b86-47ff-435f-b619-baa374a78476" (UID: "5e266b86-47ff-435f-b619-baa374a78476"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:43:52 crc kubenswrapper[4690]: E0320 13:43:52.180763 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="93013757-d360-41e7-92a9-211155703015" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.201634 4690 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.201667 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.201677 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.201687 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e266b86-47ff-435f-b619-baa374a78476-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.398778 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-n9p8r"] Mar 20 13:43:52 crc kubenswrapper[4690]: W0320 13:43:52.406731 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod35385d42_f164_4605_8a55_290d5acc5192.slice/crio-5a5cb0f1722eec25638c791eb4fc843b1f74a82eb8b4d8de328dac0256b4631b WatchSource:0}: Error finding container 5a5cb0f1722eec25638c791eb4fc843b1f74a82eb8b4d8de328dac0256b4631b: Status 404 returned error can't find the container with id 5a5cb0f1722eec25638c791eb4fc843b1f74a82eb8b4d8de328dac0256b4631b Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.431216 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" path="/var/lib/kubelet/pods/b5b2037e-3b1c-491f-9f12-d8e907ed85fc/volumes" Mar 20 13:43:52 crc kubenswrapper[4690]: W0320 13:43:52.444379 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod748e219a_0f6d_4ab3_aa46_e62b6310ca30.slice/crio-7684e8839376ac286555e0dc74544579ad38733fa790d7c1e4aede3e1620ff6c WatchSource:0}: Error finding container 7684e8839376ac286555e0dc74544579ad38733fa790d7c1e4aede3e1620ff6c: Status 404 returned error can't find the container with id 7684e8839376ac286555e0dc74544579ad38733fa790d7c1e4aede3e1620ff6c Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.458769 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.584205 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.826159 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"93013757-d360-41e7-92a9-211155703015","Type":"ContainerStarted","Data":"2e3964488280759f9a2561fa2762617072b1f555e44977877464f2a5ae3d9a87"} Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.826533 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.826597 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="93013757-d360-41e7-92a9-211155703015" containerName="ceilometer-notification-agent" containerID="cri-o://898385b96ceec0a95ab4e0586069810b30d39cb8f8e881a7ffe396d66b2ca02f" gracePeriod=30 Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.826609 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="93013757-d360-41e7-92a9-211155703015" containerName="proxy-httpd" containerID="cri-o://2e3964488280759f9a2561fa2762617072b1f555e44977877464f2a5ae3d9a87" gracePeriod=30 Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.826650 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="93013757-d360-41e7-92a9-211155703015" containerName="sg-core" containerID="cri-o://62a084de7a7c77b0e84395468193dcf066a1779adca2afb483e12cc4a3932ad8" gracePeriod=30 Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.832086 4690 generic.go:334] "Generic (PLEG): container finished" podID="35385d42-f164-4605-8a55-290d5acc5192" containerID="bc0b536e1572bb5bbc6352489068799b6720a5d92ef13bec818de0c2f4aa5b81" exitCode=0 Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.832189 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" event={"ID":"35385d42-f164-4605-8a55-290d5acc5192","Type":"ContainerDied","Data":"bc0b536e1572bb5bbc6352489068799b6720a5d92ef13bec818de0c2f4aa5b81"} Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.832242 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" event={"ID":"35385d42-f164-4605-8a55-290d5acc5192","Type":"ContainerStarted","Data":"5a5cb0f1722eec25638c791eb4fc843b1f74a82eb8b4d8de328dac0256b4631b"} Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.836632 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748e219a-0f6d-4ab3-aa46-e62b6310ca30","Type":"ContainerStarted","Data":"7684e8839376ac286555e0dc74544579ad38733fa790d7c1e4aede3e1620ff6c"} Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.853164 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb3cd5da-dd91-4226-9f58-85a1bb729397","Type":"ContainerStarted","Data":"9aee740f054a4df41678a2387d298240907c4a5754e0c1fd6955906f39067a33"} Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.865266 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.917000 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.931333 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-h7ptl"] Mar 20 13:43:52 crc kubenswrapper[4690]: I0320 13:43:52.940703 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6b7b667979-h7ptl"] Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.520226 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.695947 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-74474d96d6-48nxq" Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.758979 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-8b4d55c86-ddn7b"] Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.759196 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-8b4d55c86-ddn7b" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api-log" containerID="cri-o://78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f" gracePeriod=30 Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.759306 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-8b4d55c86-ddn7b" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api" containerID="cri-o://ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba" gracePeriod=30 Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.897629 4690 generic.go:334] "Generic (PLEG): container finished" podID="93013757-d360-41e7-92a9-211155703015" containerID="2e3964488280759f9a2561fa2762617072b1f555e44977877464f2a5ae3d9a87" exitCode=0 Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.897981 4690 generic.go:334] "Generic (PLEG): container finished" podID="93013757-d360-41e7-92a9-211155703015" containerID="62a084de7a7c77b0e84395468193dcf066a1779adca2afb483e12cc4a3932ad8" exitCode=2 Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.897715 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"93013757-d360-41e7-92a9-211155703015","Type":"ContainerDied","Data":"2e3964488280759f9a2561fa2762617072b1f555e44977877464f2a5ae3d9a87"} Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.898059 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"93013757-d360-41e7-92a9-211155703015","Type":"ContainerDied","Data":"62a084de7a7c77b0e84395468193dcf066a1779adca2afb483e12cc4a3932ad8"} Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.900447 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" event={"ID":"35385d42-f164-4605-8a55-290d5acc5192","Type":"ContainerStarted","Data":"9aeb5088c565fdb27201256af0e2df1079d1ca094ff0a39688814c0f027a9bac"} Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.901701 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.911002 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"748e219a-0f6d-4ab3-aa46-e62b6310ca30","Type":"ContainerStarted","Data":"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5"} Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.913150 4690 generic.go:334] "Generic (PLEG): container finished" podID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerID="78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f" exitCode=143 Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.913914 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8b4d55c86-ddn7b" event={"ID":"e3f864c3-ce54-42e4-b324-f488eea1fadc","Type":"ContainerDied","Data":"78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f"} Mar 20 13:43:53 crc kubenswrapper[4690]: I0320 13:43:53.936218 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" podStartSLOduration=3.936193669 podStartE2EDuration="3.936193669s" podCreationTimestamp="2026-03-20 13:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:53.920240794 +0000 UTC m=+1280.209840737" watchObservedRunningTime="2026-03-20 13:43:53.936193669 +0000 UTC m=+1280.225793612" Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.432090 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e266b86-47ff-435f-b619-baa374a78476" path="/var/lib/kubelet/pods/5e266b86-47ff-435f-b619-baa374a78476/volumes" Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.930171 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb3cd5da-dd91-4226-9f58-85a1bb729397","Type":"ContainerStarted","Data":"e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913"} Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.930371 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb3cd5da-dd91-4226-9f58-85a1bb729397","Type":"ContainerStarted","Data":"a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9"} Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.942909 4690 generic.go:334] "Generic (PLEG): container finished" podID="93013757-d360-41e7-92a9-211155703015" containerID="898385b96ceec0a95ab4e0586069810b30d39cb8f8e881a7ffe396d66b2ca02f" exitCode=0 Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.942969 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"93013757-d360-41e7-92a9-211155703015","Type":"ContainerDied","Data":"898385b96ceec0a95ab4e0586069810b30d39cb8f8e881a7ffe396d66b2ca02f"} Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.958462 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.296905446 podStartE2EDuration="4.958445275s" podCreationTimestamp="2026-03-20 13:43:50 +0000 UTC" firstStartedPulling="2026-03-20 13:43:52.602674443 +0000 UTC m=+1278.892274386" lastFinishedPulling="2026-03-20 13:43:53.264214272 +0000 UTC m=+1279.553814215" observedRunningTime="2026-03-20 13:43:54.956572532 +0000 UTC m=+1281.246172475" watchObservedRunningTime="2026-03-20 13:43:54.958445275 +0000 UTC m=+1281.248045218" Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.961110 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" 
containerName="cinder-api-log" containerID="cri-o://098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5" gracePeriod=30 Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.961418 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748e219a-0f6d-4ab3-aa46-e62b6310ca30","Type":"ContainerStarted","Data":"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f"} Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.961452 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Mar 20 13:43:54 crc kubenswrapper[4690]: I0320 13:43:54.961491 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerName="cinder-api" containerID="cri-o://88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f" gracePeriod=30 Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.004496 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.004474318 podStartE2EDuration="5.004474318s" podCreationTimestamp="2026-03-20 13:43:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:54.982988715 +0000 UTC m=+1281.272588688" watchObservedRunningTime="2026-03-20 13:43:55.004474318 +0000 UTC m=+1281.294074261" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.161482 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.291824 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-scripts\") pod \"93013757-d360-41e7-92a9-211155703015\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.292178 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bk5j5\" (UniqueName: \"kubernetes.io/projected/93013757-d360-41e7-92a9-211155703015-kube-api-access-bk5j5\") pod \"93013757-d360-41e7-92a9-211155703015\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.292240 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-config-data\") pod \"93013757-d360-41e7-92a9-211155703015\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.292260 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-sg-core-conf-yaml\") pod \"93013757-d360-41e7-92a9-211155703015\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.292309 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-combined-ca-bundle\") pod \"93013757-d360-41e7-92a9-211155703015\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.292393 4690 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-run-httpd\") pod \"93013757-d360-41e7-92a9-211155703015\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.292415 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-log-httpd\") pod \"93013757-d360-41e7-92a9-211155703015\" (UID: \"93013757-d360-41e7-92a9-211155703015\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.293040 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "93013757-d360-41e7-92a9-211155703015" (UID: "93013757-d360-41e7-92a9-211155703015"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.293186 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "93013757-d360-41e7-92a9-211155703015" (UID: "93013757-d360-41e7-92a9-211155703015"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.306429 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/93013757-d360-41e7-92a9-211155703015-kube-api-access-bk5j5" (OuterVolumeSpecName: "kube-api-access-bk5j5") pod "93013757-d360-41e7-92a9-211155703015" (UID: "93013757-d360-41e7-92a9-211155703015"). InnerVolumeSpecName "kube-api-access-bk5j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.320652 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-scripts" (OuterVolumeSpecName: "scripts") pod "93013757-d360-41e7-92a9-211155703015" (UID: "93013757-d360-41e7-92a9-211155703015"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.323712 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "93013757-d360-41e7-92a9-211155703015" (UID: "93013757-d360-41e7-92a9-211155703015"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.396653 4690 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.396675 4690 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.396684 4690 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/93013757-d360-41e7-92a9-211155703015-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.396691 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.396700 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bk5j5\" (UniqueName: \"kubernetes.io/projected/93013757-d360-41e7-92a9-211155703015-kube-api-access-bk5j5\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.399570 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "93013757-d360-41e7-92a9-211155703015" (UID: "93013757-d360-41e7-92a9-211155703015"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.454953 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-config-data" (OuterVolumeSpecName: "config-data") pod "93013757-d360-41e7-92a9-211155703015" (UID: "93013757-d360-41e7-92a9-211155703015"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.500009 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.500034 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/93013757-d360-41e7-92a9-211155703015-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.565228 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.572081 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.704744 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qd4j8\" (UniqueName: \"kubernetes.io/projected/748e219a-0f6d-4ab3-aa46-e62b6310ca30-kube-api-access-qd4j8\") pod \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.704798 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-combined-ca-bundle\") pod \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.704860 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-scripts\") pod \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.705016 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748e219a-0f6d-4ab3-aa46-e62b6310ca30-etc-machine-id\") pod \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.705060 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748e219a-0f6d-4ab3-aa46-e62b6310ca30-logs\") pod \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.705106 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data-custom\") pod \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.705124 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data\") pod \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\" (UID: \"748e219a-0f6d-4ab3-aa46-e62b6310ca30\") " Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.705412 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/748e219a-0f6d-4ab3-aa46-e62b6310ca30-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "748e219a-0f6d-4ab3-aa46-e62b6310ca30" (UID: "748e219a-0f6d-4ab3-aa46-e62b6310ca30"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.706193 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/748e219a-0f6d-4ab3-aa46-e62b6310ca30-logs" (OuterVolumeSpecName: "logs") pod "748e219a-0f6d-4ab3-aa46-e62b6310ca30" (UID: "748e219a-0f6d-4ab3-aa46-e62b6310ca30"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.709165 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/748e219a-0f6d-4ab3-aa46-e62b6310ca30-kube-api-access-qd4j8" (OuterVolumeSpecName: "kube-api-access-qd4j8") pod "748e219a-0f6d-4ab3-aa46-e62b6310ca30" (UID: "748e219a-0f6d-4ab3-aa46-e62b6310ca30"). InnerVolumeSpecName "kube-api-access-qd4j8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.710300 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "748e219a-0f6d-4ab3-aa46-e62b6310ca30" (UID: "748e219a-0f6d-4ab3-aa46-e62b6310ca30"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.718040 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-scripts" (OuterVolumeSpecName: "scripts") pod "748e219a-0f6d-4ab3-aa46-e62b6310ca30" (UID: "748e219a-0f6d-4ab3-aa46-e62b6310ca30"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.741162 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "748e219a-0f6d-4ab3-aa46-e62b6310ca30" (UID: "748e219a-0f6d-4ab3-aa46-e62b6310ca30"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.767822 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data" (OuterVolumeSpecName: "config-data") pod "748e219a-0f6d-4ab3-aa46-e62b6310ca30" (UID: "748e219a-0f6d-4ab3-aa46-e62b6310ca30"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.807576 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/748e219a-0f6d-4ab3-aa46-e62b6310ca30-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.807619 4690 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.807634 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.807647 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qd4j8\" (UniqueName: \"kubernetes.io/projected/748e219a-0f6d-4ab3-aa46-e62b6310ca30-kube-api-access-qd4j8\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.807659 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.807669 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/748e219a-0f6d-4ab3-aa46-e62b6310ca30-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.807679 4690 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/748e219a-0f6d-4ab3-aa46-e62b6310ca30-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.944138 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6b7b667979-h7ptl" podUID="5e266b86-47ff-435f-b619-baa374a78476" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.155:5353: i/o timeout" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.979707 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"93013757-d360-41e7-92a9-211155703015","Type":"ContainerDied","Data":"47e62e6e8ea51111fd27e6969e7993fb093220f9f056010d7ec78a1f6323d0a6"} Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.980148 4690 scope.go:117] "RemoveContainer" containerID="2e3964488280759f9a2561fa2762617072b1f555e44977877464f2a5ae3d9a87" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.980482 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.984321 4690 generic.go:334] "Generic (PLEG): container finished" podID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerID="88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f" exitCode=0 Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.984345 4690 generic.go:334] "Generic (PLEG): container finished" podID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerID="098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5" exitCode=143 Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.985195 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748e219a-0f6d-4ab3-aa46-e62b6310ca30","Type":"ContainerDied","Data":"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f"} Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.985292 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748e219a-0f6d-4ab3-aa46-e62b6310ca30","Type":"ContainerDied","Data":"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5"} Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.985224 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.985613 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:43:55 crc kubenswrapper[4690]: I0320 13:43:55.985634 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"748e219a-0f6d-4ab3-aa46-e62b6310ca30","Type":"ContainerDied","Data":"7684e8839376ac286555e0dc74544579ad38733fa790d7c1e4aede3e1620ff6c"} Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.024952 4690 scope.go:117] "RemoveContainer" containerID="62a084de7a7c77b0e84395468193dcf066a1779adca2afb483e12cc4a3932ad8" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.095080 4690 scope.go:117] "RemoveContainer" containerID="898385b96ceec0a95ab4e0586069810b30d39cb8f8e881a7ffe396d66b2ca02f" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.120910 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.128965 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.142828 4690 scope.go:117] "RemoveContainer" containerID="88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.160687 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.169534 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.180296 4690 scope.go:117] "RemoveContainer" containerID="098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.181949 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182313 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e266b86-47ff-435f-b619-baa374a78476" containerName="init" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182328 4690 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="5e266b86-47ff-435f-b619-baa374a78476" containerName="init" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182348 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerName="barbican-keystone-listener-log" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182355 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerName="barbican-keystone-listener-log" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182370 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerName="cinder-api-log" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182375 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerName="cinder-api-log" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182386 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerName="barbican-keystone-listener" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182392 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerName="barbican-keystone-listener" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182405 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerName="cinder-api" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182411 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerName="cinder-api" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182426 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93013757-d360-41e7-92a9-211155703015" containerName="ceilometer-notification-agent" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182432 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="93013757-d360-41e7-92a9-211155703015" containerName="ceilometer-notification-agent" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182442 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93013757-d360-41e7-92a9-211155703015" containerName="sg-core" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182449 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="93013757-d360-41e7-92a9-211155703015" containerName="sg-core" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182464 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e266b86-47ff-435f-b619-baa374a78476" containerName="dnsmasq-dns" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182478 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e266b86-47ff-435f-b619-baa374a78476" containerName="dnsmasq-dns" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.182489 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="93013757-d360-41e7-92a9-211155703015" containerName="proxy-httpd" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182495 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="93013757-d360-41e7-92a9-211155703015" containerName="proxy-httpd" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182641 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="93013757-d360-41e7-92a9-211155703015" containerName="ceilometer-notification-agent" Mar 20 13:43:56 crc 
kubenswrapper[4690]: I0320 13:43:56.182650 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerName="barbican-keystone-listener-log" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182663 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerName="cinder-api" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182672 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" containerName="cinder-api-log" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182681 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="93013757-d360-41e7-92a9-211155703015" containerName="proxy-httpd" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182691 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e266b86-47ff-435f-b619-baa374a78476" containerName="dnsmasq-dns" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182703 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="93013757-d360-41e7-92a9-211155703015" containerName="sg-core" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.182713 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="b5b2037e-3b1c-491f-9f12-d8e907ed85fc" containerName="barbican-keystone-listener" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.184279 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.186617 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.186973 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.197574 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.204527 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.206168 4690 scope.go:117] "RemoveContainer" containerID="88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.206275 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.208249 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f\": container with ID starting with 88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f not found: ID does not exist" containerID="88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.208284 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f"} err="failed to get container status \"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f\": rpc error: code = NotFound desc = could not find container \"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f\": container with ID starting with 88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f not found: ID does not exist" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.208326 4690 scope.go:117] "RemoveContainer" containerID="098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.208648 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.208675 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.208786 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Mar 20 13:43:56 crc kubenswrapper[4690]: E0320 13:43:56.212337 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5\": container with ID starting with 098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5 not found: ID does not exist" containerID="098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.212392 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5"} err="failed to get container status \"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5\": rpc error: code = NotFound desc = could not find container \"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5\": container with ID starting with 098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5 not found: ID does not exist" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.212424 4690 scope.go:117] "RemoveContainer" containerID="88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.214701 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.218482 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f"} err="failed to get container status \"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f\": rpc error: code = NotFound desc = 
could not find container \"88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f\": container with ID starting with 88e6b84ab5862ebe44ead32f158073c690530e50b20d32975b8a78df74899e5f not found: ID does not exist" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.218522 4690 scope.go:117] "RemoveContainer" containerID="098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.218949 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5"} err="failed to get container status \"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5\": rpc error: code = NotFound desc = could not find container \"098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5\": container with ID starting with 098e692acc620a1a5d48a213b2652750bb11802200703e470f26fbb99ca739d5 not found: ID does not exist" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.289247 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-59dcfcb47f-zsm8d"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.289466 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-59dcfcb47f-zsm8d" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-api" containerID="cri-o://2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c" gracePeriod=30 Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.290027 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-59dcfcb47f-zsm8d" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-httpd" containerID="cri-o://20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58" gracePeriod=30 Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.317886 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319413 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319454 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/90a721fb-0ffe-4b4f-890c-97dca6ee9303-etc-machine-id\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319476 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-scripts\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319501 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-config-data\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc 
kubenswrapper[4690]: I0320 13:43:56.319531 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wjhpq\" (UniqueName: \"kubernetes.io/projected/90a721fb-0ffe-4b4f-890c-97dca6ee9303-kube-api-access-wjhpq\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319586 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-run-httpd\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319601 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-log-httpd\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319619 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-scripts\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319641 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90a721fb-0ffe-4b4f-890c-97dca6ee9303-logs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319669 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wh6bb\" (UniqueName: \"kubernetes.io/projected/0675c9f3-f43f-4b81-841a-555b9d7152fd-kube-api-access-wh6bb\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319686 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-config-data\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319729 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319745 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319761 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319779 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-config-data-custom\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.319807 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-public-tls-certs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.347814 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-64dbdc6bf-bqlx5"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.349237 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.360409 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-64dbdc6bf-bqlx5"] Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421019 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-run-httpd\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421051 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-log-httpd\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421075 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-scripts\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421097 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90a721fb-0ffe-4b4f-890c-97dca6ee9303-logs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421128 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wh6bb\" (UniqueName: \"kubernetes.io/projected/0675c9f3-f43f-4b81-841a-555b9d7152fd-kube-api-access-wh6bb\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421147 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-config-data\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc 
kubenswrapper[4690]: I0320 13:43:56.421503 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421528 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421543 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421567 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-config-data-custom\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421598 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-public-tls-certs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421619 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421639 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/90a721fb-0ffe-4b4f-890c-97dca6ee9303-etc-machine-id\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421658 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-scripts\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421679 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-config-data\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421707 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wjhpq\" (UniqueName: \"kubernetes.io/projected/90a721fb-0ffe-4b4f-890c-97dca6ee9303-kube-api-access-wjhpq\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " 
pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421743 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-run-httpd\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.421804 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-log-httpd\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.422019 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/90a721fb-0ffe-4b4f-890c-97dca6ee9303-etc-machine-id\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.422602 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/90a721fb-0ffe-4b4f-890c-97dca6ee9303-logs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.425992 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-scripts\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.426655 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.426772 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.427007 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="748e219a-0f6d-4ab3-aa46-e62b6310ca30" path="/var/lib/kubelet/pods/748e219a-0f6d-4ab3-aa46-e62b6310ca30/volumes" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.427346 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-config-data\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.427360 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.427760 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="93013757-d360-41e7-92a9-211155703015" 
path="/var/lib/kubelet/pods/93013757-d360-41e7-92a9-211155703015/volumes" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.428373 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-public-tls-certs\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.428654 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.430760 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-config-data\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.432447 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-scripts\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.432908 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/90a721fb-0ffe-4b4f-890c-97dca6ee9303-config-data-custom\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.437029 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wh6bb\" (UniqueName: \"kubernetes.io/projected/0675c9f3-f43f-4b81-841a-555b9d7152fd-kube-api-access-wh6bb\") pod \"ceilometer-0\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.440017 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wjhpq\" (UniqueName: \"kubernetes.io/projected/90a721fb-0ffe-4b4f-890c-97dca6ee9303-kube-api-access-wjhpq\") pod \"cinder-api-0\" (UID: \"90a721fb-0ffe-4b4f-890c-97dca6ee9303\") " pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.507699 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.523139 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6d8gg\" (UniqueName: \"kubernetes.io/projected/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-kube-api-access-6d8gg\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.523188 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-httpd-config\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.523239 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-config\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.523262 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-internal-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.523310 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-combined-ca-bundle\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.523396 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-public-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.523474 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-ovndb-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.531323 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.625765 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-public-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.625896 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-ovndb-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.625954 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6d8gg\" (UniqueName: \"kubernetes.io/projected/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-kube-api-access-6d8gg\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.626012 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-httpd-config\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.626072 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-config\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.626091 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-internal-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.626146 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-combined-ca-bundle\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.630836 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-combined-ca-bundle\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.631430 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-public-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.631490 4690 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-internal-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.631509 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-ovndb-tls-certs\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.632278 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-config\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.632865 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-httpd-config\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.644409 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6d8gg\" (UniqueName: \"kubernetes.io/projected/ece8bb9b-d177-45ba-8888-0c3df7c38bb8-kube-api-access-6d8gg\") pod \"neutron-64dbdc6bf-bqlx5\" (UID: \"ece8bb9b-d177-45ba-8888-0c3df7c38bb8\") " pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:56 crc kubenswrapper[4690]: I0320 13:43:56.717628 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.037086 4690 generic.go:334] "Generic (PLEG): container finished" podID="45b97511-9613-4868-844e-689823a4ae38" containerID="20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58" exitCode=0 Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.037916 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59dcfcb47f-zsm8d" event={"ID":"45b97511-9613-4868-844e-689823a4ae38","Type":"ContainerDied","Data":"20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58"} Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.074939 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:43:57 crc kubenswrapper[4690]: W0320 13:43:57.080080 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod0675c9f3_f43f_4b81_841a_555b9d7152fd.slice/crio-915ee46e3211699019c4d40892e9020474bc1a377e51f791985269826f16a776 WatchSource:0}: Error finding container 915ee46e3211699019c4d40892e9020474bc1a377e51f791985269826f16a776: Status 404 returned error can't find the container with id 915ee46e3211699019c4d40892e9020474bc1a377e51f791985269826f16a776 Mar 20 13:43:57 crc kubenswrapper[4690]: W0320 13:43:57.081395 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod90a721fb_0ffe_4b4f_890c_97dca6ee9303.slice/crio-22823e15f638ede90cb84bd24f8c56f738c16667a18e64313e0cb8d401f4075d WatchSource:0}: Error finding container 22823e15f638ede90cb84bd24f8c56f738c16667a18e64313e0cb8d401f4075d: Status 404 returned error can't find the container with id 22823e15f638ede90cb84bd24f8c56f738c16667a18e64313e0cb8d401f4075d Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.085379 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.187555 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8b4d55c86-ddn7b" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.167:9311/healthcheck\": read tcp 10.217.0.2:35184->10.217.0.167:9311: read: connection reset by peer" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.187586 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-8b4d55c86-ddn7b" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.167:9311/healthcheck\": read tcp 10.217.0.2:35172->10.217.0.167:9311: read: connection reset by peer" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.222680 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-64dbdc6bf-bqlx5"] Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.747506 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.859327 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data-custom\") pod \"e3f864c3-ce54-42e4-b324-f488eea1fadc\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.859383 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6d5ds\" (UniqueName: \"kubernetes.io/projected/e3f864c3-ce54-42e4-b324-f488eea1fadc-kube-api-access-6d5ds\") pod \"e3f864c3-ce54-42e4-b324-f488eea1fadc\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.859443 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-combined-ca-bundle\") pod \"e3f864c3-ce54-42e4-b324-f488eea1fadc\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.859468 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3f864c3-ce54-42e4-b324-f488eea1fadc-logs\") pod \"e3f864c3-ce54-42e4-b324-f488eea1fadc\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.859508 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data\") pod \"e3f864c3-ce54-42e4-b324-f488eea1fadc\" (UID: \"e3f864c3-ce54-42e4-b324-f488eea1fadc\") " Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.863211 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3f864c3-ce54-42e4-b324-f488eea1fadc-logs" (OuterVolumeSpecName: "logs") pod "e3f864c3-ce54-42e4-b324-f488eea1fadc" (UID: "e3f864c3-ce54-42e4-b324-f488eea1fadc"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.874482 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3f864c3-ce54-42e4-b324-f488eea1fadc-kube-api-access-6d5ds" (OuterVolumeSpecName: "kube-api-access-6d5ds") pod "e3f864c3-ce54-42e4-b324-f488eea1fadc" (UID: "e3f864c3-ce54-42e4-b324-f488eea1fadc"). InnerVolumeSpecName "kube-api-access-6d5ds". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.889112 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "e3f864c3-ce54-42e4-b324-f488eea1fadc" (UID: "e3f864c3-ce54-42e4-b324-f488eea1fadc"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.966932 4690 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.966964 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6d5ds\" (UniqueName: \"kubernetes.io/projected/e3f864c3-ce54-42e4-b324-f488eea1fadc-kube-api-access-6d5ds\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.966974 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e3f864c3-ce54-42e4-b324-f488eea1fadc-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.975962 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e3f864c3-ce54-42e4-b324-f488eea1fadc" (UID: "e3f864c3-ce54-42e4-b324-f488eea1fadc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:57 crc kubenswrapper[4690]: I0320 13:43:57.976045 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data" (OuterVolumeSpecName: "config-data") pod "e3f864c3-ce54-42e4-b324-f488eea1fadc" (UID: "e3f864c3-ce54-42e4-b324-f488eea1fadc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.049775 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"90a721fb-0ffe-4b4f-890c-97dca6ee9303","Type":"ContainerStarted","Data":"594077964b3b0796cd2845b8ebf4eb53afac5e2b95536bc7559dd04bf9886927"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.049817 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"90a721fb-0ffe-4b4f-890c-97dca6ee9303","Type":"ContainerStarted","Data":"22823e15f638ede90cb84bd24f8c56f738c16667a18e64313e0cb8d401f4075d"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.052530 4690 generic.go:334] "Generic (PLEG): container finished" podID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerID="ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba" exitCode=0 Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.052582 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8b4d55c86-ddn7b" event={"ID":"e3f864c3-ce54-42e4-b324-f488eea1fadc","Type":"ContainerDied","Data":"ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.052605 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-8b4d55c86-ddn7b" event={"ID":"e3f864c3-ce54-42e4-b324-f488eea1fadc","Type":"ContainerDied","Data":"d36013c6617c880fa8b937d7ffe8b64a634c7d21e5d87e0074c47ac15bc411e7"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.052623 4690 scope.go:117] "RemoveContainer" containerID="ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.052727 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-8b4d55c86-ddn7b" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.058820 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64dbdc6bf-bqlx5" event={"ID":"ece8bb9b-d177-45ba-8888-0c3df7c38bb8","Type":"ContainerStarted","Data":"a949832be2c0af9137be0cba001eb5f1ba7dd1743df057e53d28cc7d82ab8e5f"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.058874 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64dbdc6bf-bqlx5" event={"ID":"ece8bb9b-d177-45ba-8888-0c3df7c38bb8","Type":"ContainerStarted","Data":"653f247eed8bcf1f802f421e0b61b1d413022dc5c83eec25fd8be591868e14f8"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.058886 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-64dbdc6bf-bqlx5" event={"ID":"ece8bb9b-d177-45ba-8888-0c3df7c38bb8","Type":"ContainerStarted","Data":"e127be36744db2c545660d7ab284376b9b29f20b2f0301aa3ceb478685cff65c"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.058989 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.065572 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerStarted","Data":"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.065616 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerStarted","Data":"915ee46e3211699019c4d40892e9020474bc1a377e51f791985269826f16a776"} Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.068666 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.068909 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e3f864c3-ce54-42e4-b324-f488eea1fadc-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.079600 4690 scope.go:117] "RemoveContainer" containerID="78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.101703 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-64dbdc6bf-bqlx5" podStartSLOduration=2.10168067 podStartE2EDuration="2.10168067s" podCreationTimestamp="2026-03-20 13:43:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:58.081268898 +0000 UTC m=+1284.370868841" watchObservedRunningTime="2026-03-20 13:43:58.10168067 +0000 UTC m=+1284.391280623" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.116993 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/neutron-59dcfcb47f-zsm8d" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-httpd" probeResult="failure" output="Get \"https://10.217.0.157:9696/\": dial tcp 10.217.0.157:9696: connect: connection refused" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.126812 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/barbican-api-8b4d55c86-ddn7b"] Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.128764 4690 scope.go:117] "RemoveContainer" containerID="ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba" Mar 20 13:43:58 crc kubenswrapper[4690]: E0320 13:43:58.130334 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba\": container with ID starting with ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba not found: ID does not exist" containerID="ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.130372 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba"} err="failed to get container status \"ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba\": rpc error: code = NotFound desc = could not find container \"ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba\": container with ID starting with ef5df4d510829f17a39df7d45a9b3d971c864cd0dabbf4b85638f80505903aba not found: ID does not exist" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.130396 4690 scope.go:117] "RemoveContainer" containerID="78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f" Mar 20 13:43:58 crc kubenswrapper[4690]: E0320 13:43:58.130596 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f\": container with ID starting with 78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f not found: ID does not exist" containerID="78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.130617 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f"} err="failed to get container status \"78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f\": rpc error: code = NotFound desc = could not find container \"78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f\": container with ID starting with 78cf15820441d52b0b0ae5265aa594ac47934664a3062ba05ed2d7c83cb3ca1f not found: ID does not exist" Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.133865 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-8b4d55c86-ddn7b"] Mar 20 13:43:58 crc kubenswrapper[4690]: I0320 13:43:58.432867 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" path="/var/lib/kubelet/pods/e3f864c3-ce54-42e4-b324-f488eea1fadc/volumes" Mar 20 13:43:59 crc kubenswrapper[4690]: I0320 13:43:59.091224 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerStarted","Data":"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a"} Mar 20 13:43:59 crc kubenswrapper[4690]: I0320 13:43:59.096802 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"90a721fb-0ffe-4b4f-890c-97dca6ee9303","Type":"ContainerStarted","Data":"2a5537fd2861943fe7e44e0c29d0cfdd8d879a5914982fdff604a3668d97a96f"} Mar 20 13:43:59 crc 
kubenswrapper[4690]: I0320 13:43:59.126084 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.126049467 podStartE2EDuration="3.126049467s" podCreationTimestamp="2026-03-20 13:43:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:43:59.115727192 +0000 UTC m=+1285.405327185" watchObservedRunningTime="2026-03-20 13:43:59.126049467 +0000 UTC m=+1285.415649490" Mar 20 13:43:59 crc kubenswrapper[4690]: I0320 13:43:59.206431 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:43:59 crc kubenswrapper[4690]: I0320 13:43:59.298705 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.111879 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerStarted","Data":"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7"} Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.112165 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.133948 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566904-dq7dm"] Mar 20 13:44:00 crc kubenswrapper[4690]: E0320 13:44:00.134353 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api-log" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.134370 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api-log" Mar 20 13:44:00 crc kubenswrapper[4690]: E0320 13:44:00.134391 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.134397 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.134570 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.134588 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3f864c3-ce54-42e4-b324-f488eea1fadc" containerName="barbican-api-log" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.135193 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566904-dq7dm" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.137514 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.137757 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.137907 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.144470 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566904-dq7dm"] Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.311355 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggtfs\" (UniqueName: \"kubernetes.io/projected/3777e376-e740-4d36-9378-27dec9e98ec8-kube-api-access-ggtfs\") pod \"auto-csr-approver-29566904-dq7dm\" (UID: \"3777e376-e740-4d36-9378-27dec9e98ec8\") " pod="openshift-infra/auto-csr-approver-29566904-dq7dm" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.415955 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggtfs\" (UniqueName: \"kubernetes.io/projected/3777e376-e740-4d36-9378-27dec9e98ec8-kube-api-access-ggtfs\") pod \"auto-csr-approver-29566904-dq7dm\" (UID: \"3777e376-e740-4d36-9378-27dec9e98ec8\") " pod="openshift-infra/auto-csr-approver-29566904-dq7dm" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.442774 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggtfs\" (UniqueName: \"kubernetes.io/projected/3777e376-e740-4d36-9378-27dec9e98ec8-kube-api-access-ggtfs\") pod \"auto-csr-approver-29566904-dq7dm\" (UID: \"3777e376-e740-4d36-9378-27dec9e98ec8\") " pod="openshift-infra/auto-csr-approver-29566904-dq7dm" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.455025 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566904-dq7dm" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.790232 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.869141 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:44:00 crc kubenswrapper[4690]: I0320 13:44:00.931169 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.020562 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566904-dq7dm"] Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.037055 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-xd7ns"] Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.037278 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" podUID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" containerName="dnsmasq-dns" containerID="cri-o://a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5" gracePeriod=10 Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.124656 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerName="cinder-scheduler" containerID="cri-o://a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9" gracePeriod=30 Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.124940 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566904-dq7dm" event={"ID":"3777e376-e740-4d36-9378-27dec9e98ec8","Type":"ContainerStarted","Data":"9b7dd94b225f730b22c371ed16096beb3ce213d128731435ebfe20e33308c081"} Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.125966 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerName="probe" containerID="cri-o://e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913" gracePeriod=30 Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.159925 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.234073 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-7946cd7f64-rm6mr" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.294647 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-587c585984-xs7nl"] Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.696468 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.757600 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.848932 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-httpd-config\") pod \"45b97511-9613-4868-844e-689823a4ae38\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849116 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-svc\") pod \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849174 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-config\") pod \"45b97511-9613-4868-844e-689823a4ae38\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849197 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-sb\") pod \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849244 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-nb\") pod \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849279 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-combined-ca-bundle\") pod \"45b97511-9613-4868-844e-689823a4ae38\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849306 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69nqh\" (UniqueName: \"kubernetes.io/projected/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-kube-api-access-69nqh\") pod \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849331 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-ovndb-tls-certs\") pod \"45b97511-9613-4868-844e-689823a4ae38\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849360 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-public-tls-certs\") pod \"45b97511-9613-4868-844e-689823a4ae38\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849395 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-swift-storage-0\") pod \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\" (UID: 
\"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849450 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-internal-tls-certs\") pod \"45b97511-9613-4868-844e-689823a4ae38\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849501 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-config\") pod \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\" (UID: \"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.849594 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mljhd\" (UniqueName: \"kubernetes.io/projected/45b97511-9613-4868-844e-689823a4ae38-kube-api-access-mljhd\") pod \"45b97511-9613-4868-844e-689823a4ae38\" (UID: \"45b97511-9613-4868-844e-689823a4ae38\") " Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.855922 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45b97511-9613-4868-844e-689823a4ae38-kube-api-access-mljhd" (OuterVolumeSpecName: "kube-api-access-mljhd") pod "45b97511-9613-4868-844e-689823a4ae38" (UID: "45b97511-9613-4868-844e-689823a4ae38"). InnerVolumeSpecName "kube-api-access-mljhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.862198 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "45b97511-9613-4868-844e-689823a4ae38" (UID: "45b97511-9613-4868-844e-689823a4ae38"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.867345 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-kube-api-access-69nqh" (OuterVolumeSpecName: "kube-api-access-69nqh") pod "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" (UID: "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec"). InnerVolumeSpecName "kube-api-access-69nqh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.947997 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "45b97511-9613-4868-844e-689823a4ae38" (UID: "45b97511-9613-4868-844e-689823a4ae38"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.954367 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69nqh\" (UniqueName: \"kubernetes.io/projected/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-kube-api-access-69nqh\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.954586 4690 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.954711 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mljhd\" (UniqueName: \"kubernetes.io/projected/45b97511-9613-4868-844e-689823a4ae38-kube-api-access-mljhd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.954797 4690 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-httpd-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.957357 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" (UID: "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.973443 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" (UID: "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.980950 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "45b97511-9613-4868-844e-689823a4ae38" (UID: "45b97511-9613-4868-844e-689823a4ae38"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:01 crc kubenswrapper[4690]: I0320 13:44:01.981049 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "45b97511-9613-4868-844e-689823a4ae38" (UID: "45b97511-9613-4868-844e-689823a4ae38"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:01.999978 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-config" (OuterVolumeSpecName: "config") pod "45b97511-9613-4868-844e-689823a4ae38" (UID: "45b97511-9613-4868-844e-689823a4ae38"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.000394 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" (UID: "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.008231 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" (UID: "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.011551 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-config" (OuterVolumeSpecName: "config") pod "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" (UID: "1f4ce9f4-b42b-4bfe-9902-9f61ff722aec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.049960 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "45b97511-9613-4868-844e-689823a4ae38" (UID: "45b97511-9613-4868-844e-689823a4ae38"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056415 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056446 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056461 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056474 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056484 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056495 4690 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056503 4690 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/45b97511-9613-4868-844e-689823a4ae38-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056512 4690 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.056520 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.157838 4690 generic.go:334] "Generic (PLEG): container finished" podID="45b97511-9613-4868-844e-689823a4ae38" containerID="2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c" exitCode=0 Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.157968 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59dcfcb47f-zsm8d" event={"ID":"45b97511-9613-4868-844e-689823a4ae38","Type":"ContainerDied","Data":"2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c"} Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.158002 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-59dcfcb47f-zsm8d" event={"ID":"45b97511-9613-4868-844e-689823a4ae38","Type":"ContainerDied","Data":"13a7fb714bb3d3e8957d5ecf719180866fb096de275689d50bff70d6fb4ec9eb"} Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.158024 4690 scope.go:117] "RemoveContainer" containerID="20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.158155 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-59dcfcb47f-zsm8d" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.169751 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerStarted","Data":"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37"} Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.170787 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.181414 4690 generic.go:334] "Generic (PLEG): container finished" podID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerID="e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913" exitCode=0 Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.181518 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb3cd5da-dd91-4226-9f58-85a1bb729397","Type":"ContainerDied","Data":"e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913"} Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.197000 4690 scope.go:117] "RemoveContainer" containerID="2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.197935 4690 generic.go:334] "Generic (PLEG): container finished" podID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" containerID="a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5" exitCode=0 Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.198089 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-587c585984-xs7nl" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon-log" containerID="cri-o://b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88" gracePeriod=30 Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.198286 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.198947 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" event={"ID":"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec","Type":"ContainerDied","Data":"a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5"} Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.199021 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-848cf88cfc-xd7ns" event={"ID":"1f4ce9f4-b42b-4bfe-9902-9f61ff722aec","Type":"ContainerDied","Data":"04942e888f50463d4f86c3771f38e5d2ce16420611509c47eef8c4dcaa4efbaf"} Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.199078 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-587c585984-xs7nl" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon" containerID="cri-o://05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34" gracePeriod=30 Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.247039 4690 scope.go:117] "RemoveContainer" containerID="20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58" Mar 20 13:44:02 crc kubenswrapper[4690]: E0320 13:44:02.250013 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58\": container with ID starting with 20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58 not found: ID does not exist" containerID="20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.250402 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58"} err="failed to get container status \"20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58\": rpc error: code = NotFound desc = could not find container \"20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58\": container with ID starting with 20e65c30fd4040298e8c1b3867aefd2f552db0c0832d929391763042c9a7bd58 not found: ID does not exist" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.250484 4690 scope.go:117] "RemoveContainer" containerID="2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.232836 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.836761603 podStartE2EDuration="6.23281387s" podCreationTimestamp="2026-03-20 13:43:56 +0000 UTC" firstStartedPulling="2026-03-20 13:43:57.083062916 +0000 UTC m=+1283.372662859" lastFinishedPulling="2026-03-20 13:44:01.479115173 +0000 UTC m=+1287.768715126" observedRunningTime="2026-03-20 13:44:02.197332218 +0000 UTC m=+1288.486932161" watchObservedRunningTime="2026-03-20 13:44:02.23281387 +0000 UTC m=+1288.522413813" Mar 20 13:44:02 crc kubenswrapper[4690]: E0320 13:44:02.255384 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c\": container with ID starting with 2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c not found: ID does not exist" containerID="2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c" Mar 20 13:44:02 crc 
kubenswrapper[4690]: I0320 13:44:02.255527 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c"} err="failed to get container status \"2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c\": rpc error: code = NotFound desc = could not find container \"2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c\": container with ID starting with 2d1852a98f8c2a412e67b0539334f556d58059b658f3c007c6dfd6ee375fb30c not found: ID does not exist" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.255599 4690 scope.go:117] "RemoveContainer" containerID="a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.280324 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-59dcfcb47f-zsm8d"] Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.302234 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-59dcfcb47f-zsm8d"] Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.302890 4690 scope.go:117] "RemoveContainer" containerID="0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.351827 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-xd7ns"] Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.363180 4690 scope.go:117] "RemoveContainer" containerID="a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.368763 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-848cf88cfc-xd7ns"] Mar 20 13:44:02 crc kubenswrapper[4690]: E0320 13:44:02.379232 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5\": container with ID starting with a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5 not found: ID does not exist" containerID="a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.379480 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5"} err="failed to get container status \"a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5\": rpc error: code = NotFound desc = could not find container \"a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5\": container with ID starting with a0b0c8ac3dffbca13d4ffc6139ba1ec13b775d8fd3e0716e5aba01c0962598c5 not found: ID does not exist" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.379670 4690 scope.go:117] "RemoveContainer" containerID="0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af" Mar 20 13:44:02 crc kubenswrapper[4690]: E0320 13:44:02.381453 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af\": container with ID starting with 0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af not found: ID does not exist" containerID="0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.381587 4690 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af"} err="failed to get container status \"0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af\": rpc error: code = NotFound desc = could not find container \"0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af\": container with ID starting with 0ff98c5ed7e49ebcd46303bcca1d45073eccd6055835427e7343bfc461ff24af not found: ID does not exist" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.427037 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" path="/var/lib/kubelet/pods/1f4ce9f4-b42b-4bfe-9902-9f61ff722aec/volumes" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.428101 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45b97511-9613-4868-844e-689823a4ae38" path="/var/lib/kubelet/pods/45b97511-9613-4868-844e-689823a4ae38/volumes" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.778335 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:44:02 crc kubenswrapper[4690]: I0320 13:44:02.782340 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.043797 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-b994f67f8-wh5fd"] Mar 20 13:44:03 crc kubenswrapper[4690]: E0320 13:44:03.044501 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" containerName="init" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.044521 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" containerName="init" Mar 20 13:44:03 crc kubenswrapper[4690]: E0320 13:44:03.044532 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-httpd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.044538 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-httpd" Mar 20 13:44:03 crc kubenswrapper[4690]: E0320 13:44:03.044558 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" containerName="dnsmasq-dns" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.044566 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" containerName="dnsmasq-dns" Mar 20 13:44:03 crc kubenswrapper[4690]: E0320 13:44:03.044575 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-api" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.044581 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-api" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.044751 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f4ce9f4-b42b-4bfe-9902-9f61ff722aec" containerName="dnsmasq-dns" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.044766 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-httpd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.044781 4690 
memory_manager.go:354] "RemoveStaleState removing state" podUID="45b97511-9613-4868-844e-689823a4ae38" containerName="neutron-api" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.047766 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.054175 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b994f67f8-wh5fd"] Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.180775 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-scripts\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.180943 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-public-tls-certs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.181053 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsqlg\" (UniqueName: \"kubernetes.io/projected/31689f30-a5bb-4542-be03-9ca2c6aac585-kube-api-access-fsqlg\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.181082 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31689f30-a5bb-4542-be03-9ca2c6aac585-logs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.181141 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-combined-ca-bundle\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.181194 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-internal-tls-certs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.181226 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-config-data\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.207886 4690 generic.go:334] "Generic (PLEG): container finished" podID="3777e376-e740-4d36-9378-27dec9e98ec8" containerID="021d3d31373c90d17fcc5b543b2b52f1c4225fc63a4eff6b14cd6ba5fdedd6d6" exitCode=0 Mar 20 13:44:03 crc 
kubenswrapper[4690]: I0320 13:44:03.207941 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566904-dq7dm" event={"ID":"3777e376-e740-4d36-9378-27dec9e98ec8","Type":"ContainerDied","Data":"021d3d31373c90d17fcc5b543b2b52f1c4225fc63a4eff6b14cd6ba5fdedd6d6"} Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.283437 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsqlg\" (UniqueName: \"kubernetes.io/projected/31689f30-a5bb-4542-be03-9ca2c6aac585-kube-api-access-fsqlg\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.283517 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31689f30-a5bb-4542-be03-9ca2c6aac585-logs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.283557 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-combined-ca-bundle\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.283610 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-internal-tls-certs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.283660 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-config-data\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.283743 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-scripts\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.283891 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-public-tls-certs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.284057 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/31689f30-a5bb-4542-be03-9ca2c6aac585-logs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.291424 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-public-tls-certs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.291714 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-config-data\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.293381 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-internal-tls-certs\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.294888 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-scripts\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.304059 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31689f30-a5bb-4542-be03-9ca2c6aac585-combined-ca-bundle\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.310791 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsqlg\" (UniqueName: \"kubernetes.io/projected/31689f30-a5bb-4542-be03-9ca2c6aac585-kube-api-access-fsqlg\") pod \"placement-b994f67f8-wh5fd\" (UID: \"31689f30-a5bb-4542-be03-9ca2c6aac585\") " pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.375038 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.829303 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.829569 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:44:03 crc kubenswrapper[4690]: I0320 13:44:03.950111 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-b994f67f8-wh5fd"] Mar 20 13:44:04 crc kubenswrapper[4690]: I0320 13:44:04.231098 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b994f67f8-wh5fd" event={"ID":"31689f30-a5bb-4542-be03-9ca2c6aac585","Type":"ContainerStarted","Data":"d44b7d61f66dd5d729d81d4de1def4193a585f294d55129a7c9d6ead6a6eae1b"} Mar 20 13:44:04 crc kubenswrapper[4690]: I0320 13:44:04.233578 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b994f67f8-wh5fd" event={"ID":"31689f30-a5bb-4542-be03-9ca2c6aac585","Type":"ContainerStarted","Data":"8e673fdeed279b4c6976eb6ae425d08c20275478bea60c669043bda8ba885297"} Mar 20 13:44:04 crc kubenswrapper[4690]: I0320 13:44:04.602790 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566904-dq7dm" Mar 20 13:44:04 crc kubenswrapper[4690]: I0320 13:44:04.737794 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggtfs\" (UniqueName: \"kubernetes.io/projected/3777e376-e740-4d36-9378-27dec9e98ec8-kube-api-access-ggtfs\") pod \"3777e376-e740-4d36-9378-27dec9e98ec8\" (UID: \"3777e376-e740-4d36-9378-27dec9e98ec8\") " Mar 20 13:44:04 crc kubenswrapper[4690]: I0320 13:44:04.743068 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3777e376-e740-4d36-9378-27dec9e98ec8-kube-api-access-ggtfs" (OuterVolumeSpecName: "kube-api-access-ggtfs") pod "3777e376-e740-4d36-9378-27dec9e98ec8" (UID: "3777e376-e740-4d36-9378-27dec9e98ec8"). InnerVolumeSpecName "kube-api-access-ggtfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:04 crc kubenswrapper[4690]: I0320 13:44:04.839638 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggtfs\" (UniqueName: \"kubernetes.io/projected/3777e376-e740-4d36-9378-27dec9e98ec8-kube-api-access-ggtfs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.125455 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.241052 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-b994f67f8-wh5fd" event={"ID":"31689f30-a5bb-4542-be03-9ca2c6aac585","Type":"ContainerStarted","Data":"db31950332c5e3f66e784ca1384f3990bf4716791ef2c575008a4d648b6ffe14"} Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.241524 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.241553 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.243293 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566904-dq7dm" event={"ID":"3777e376-e740-4d36-9378-27dec9e98ec8","Type":"ContainerDied","Data":"9b7dd94b225f730b22c371ed16096beb3ce213d128731435ebfe20e33308c081"} Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.243310 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566904-dq7dm" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.243317 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b7dd94b225f730b22c371ed16096beb3ce213d128731435ebfe20e33308c081" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.255947 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-combined-ca-bundle\") pod \"fb3cd5da-dd91-4226-9f58-85a1bb729397\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.256031 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data\") pod \"fb3cd5da-dd91-4226-9f58-85a1bb729397\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.256051 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-scripts\") pod \"fb3cd5da-dd91-4226-9f58-85a1bb729397\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.256072 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb3cd5da-dd91-4226-9f58-85a1bb729397-etc-machine-id\") pod \"fb3cd5da-dd91-4226-9f58-85a1bb729397\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.256126 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data-custom\") pod \"fb3cd5da-dd91-4226-9f58-85a1bb729397\" (UID: \"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.256195 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh8mt\" (UniqueName: \"kubernetes.io/projected/fb3cd5da-dd91-4226-9f58-85a1bb729397-kube-api-access-wh8mt\") pod \"fb3cd5da-dd91-4226-9f58-85a1bb729397\" (UID: 
\"fb3cd5da-dd91-4226-9f58-85a1bb729397\") " Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.257833 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fb3cd5da-dd91-4226-9f58-85a1bb729397-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fb3cd5da-dd91-4226-9f58-85a1bb729397" (UID: "fb3cd5da-dd91-4226-9f58-85a1bb729397"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.265340 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb3cd5da-dd91-4226-9f58-85a1bb729397-kube-api-access-wh8mt" (OuterVolumeSpecName: "kube-api-access-wh8mt") pod "fb3cd5da-dd91-4226-9f58-85a1bb729397" (UID: "fb3cd5da-dd91-4226-9f58-85a1bb729397"). InnerVolumeSpecName "kube-api-access-wh8mt". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.268686 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fb3cd5da-dd91-4226-9f58-85a1bb729397" (UID: "fb3cd5da-dd91-4226-9f58-85a1bb729397"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.270328 4690 generic.go:334] "Generic (PLEG): container finished" podID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerID="a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9" exitCode=0 Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.270356 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.270378 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb3cd5da-dd91-4226-9f58-85a1bb729397","Type":"ContainerDied","Data":"a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9"} Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.270773 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"fb3cd5da-dd91-4226-9f58-85a1bb729397","Type":"ContainerDied","Data":"9aee740f054a4df41678a2387d298240907c4a5754e0c1fd6955906f39067a33"} Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.270798 4690 scope.go:117] "RemoveContainer" containerID="e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.270512 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-scripts" (OuterVolumeSpecName: "scripts") pod "fb3cd5da-dd91-4226-9f58-85a1bb729397" (UID: "fb3cd5da-dd91-4226-9f58-85a1bb729397"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.285782 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-b994f67f8-wh5fd" podStartSLOduration=2.285764899 podStartE2EDuration="2.285764899s" podCreationTimestamp="2026-03-20 13:44:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:44:05.264525953 +0000 UTC m=+1291.554125906" watchObservedRunningTime="2026-03-20 13:44:05.285764899 +0000 UTC m=+1291.575364842" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.295474 4690 scope.go:117] "RemoveContainer" containerID="a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.319516 4690 scope.go:117] "RemoveContainer" containerID="e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913" Mar 20 13:44:05 crc kubenswrapper[4690]: E0320 13:44:05.320081 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913\": container with ID starting with e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913 not found: ID does not exist" containerID="e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.320196 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913"} err="failed to get container status \"e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913\": rpc error: code = NotFound desc = could not find container \"e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913\": container with ID starting with e8d254fcd78b1094524239c2cf1453efd7d088e7b4816f4747a9425f0b063913 not found: ID does not exist" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.320315 4690 scope.go:117] "RemoveContainer" containerID="a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9" Mar 20 13:44:05 crc kubenswrapper[4690]: E0320 13:44:05.320836 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9\": container with ID starting with a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9 not found: ID does not exist" containerID="a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.320897 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9"} err="failed to get container status \"a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9\": rpc error: code = NotFound desc = could not find container \"a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9\": container with ID starting with a1c7ca78fdd284be2745d5cace11cd1c845802ae2c309e25cf844691df753ea9 not found: ID does not exist" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.350529 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod 
"fb3cd5da-dd91-4226-9f58-85a1bb729397" (UID: "fb3cd5da-dd91-4226-9f58-85a1bb729397"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.359590 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.359617 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.359626 4690 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fb3cd5da-dd91-4226-9f58-85a1bb729397-etc-machine-id\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.359634 4690 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.359642 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh8mt\" (UniqueName: \"kubernetes.io/projected/fb3cd5da-dd91-4226-9f58-85a1bb729397-kube-api-access-wh8mt\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.378304 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data" (OuterVolumeSpecName: "config-data") pod "fb3cd5da-dd91-4226-9f58-85a1bb729397" (UID: "fb3cd5da-dd91-4226-9f58-85a1bb729397"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.461734 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fb3cd5da-dd91-4226-9f58-85a1bb729397-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.603948 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.612443 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.625062 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:44:05 crc kubenswrapper[4690]: E0320 13:44:05.625520 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerName="probe" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.625545 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerName="probe" Mar 20 13:44:05 crc kubenswrapper[4690]: E0320 13:44:05.625570 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerName="cinder-scheduler" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.625579 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerName="cinder-scheduler" Mar 20 13:44:05 crc kubenswrapper[4690]: E0320 13:44:05.625597 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3777e376-e740-4d36-9378-27dec9e98ec8" containerName="oc" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.625606 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="3777e376-e740-4d36-9378-27dec9e98ec8" containerName="oc" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.625824 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="3777e376-e740-4d36-9378-27dec9e98ec8" containerName="oc" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.625863 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerName="probe" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.625881 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" containerName="cinder-scheduler" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.627205 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.630363 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.641858 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.697194 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566898-244jx"] Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.704674 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566898-244jx"] Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.767410 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t8qj4\" (UniqueName: \"kubernetes.io/projected/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-kube-api-access-t8qj4\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.767474 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-scripts\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.767501 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.767546 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-config-data\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.767689 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.767729 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.869721 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.869783 4690 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.869835 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.869959 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t8qj4\" (UniqueName: \"kubernetes.io/projected/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-kube-api-access-t8qj4\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.869990 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-scripts\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.870010 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.870048 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-config-data\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.874100 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.874119 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.874431 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-config-data\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.880180 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-scripts\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 
13:44:05.887308 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t8qj4\" (UniqueName: \"kubernetes.io/projected/b820f2a9-930b-44d0-a2c3-ad73e87c4ebb-kube-api-access-t8qj4\") pod \"cinder-scheduler-0\" (UID: \"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb\") " pod="openstack/cinder-scheduler-0" Mar 20 13:44:05 crc kubenswrapper[4690]: I0320 13:44:05.945222 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Mar 20 13:44:06 crc kubenswrapper[4690]: I0320 13:44:06.281237 4690 generic.go:334] "Generic (PLEG): container finished" podID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerID="05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34" exitCode=0 Mar 20 13:44:06 crc kubenswrapper[4690]: I0320 13:44:06.281302 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-587c585984-xs7nl" event={"ID":"ae74738f-0b10-4955-97fb-e892ca7102a0","Type":"ContainerDied","Data":"05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34"} Mar 20 13:44:06 crc kubenswrapper[4690]: I0320 13:44:06.424610 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5702a850-32be-49da-bd61-58a9f6088792" path="/var/lib/kubelet/pods/5702a850-32be-49da-bd61-58a9f6088792/volumes" Mar 20 13:44:06 crc kubenswrapper[4690]: I0320 13:44:06.425469 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb3cd5da-dd91-4226-9f58-85a1bb729397" path="/var/lib/kubelet/pods/fb3cd5da-dd91-4226-9f58-85a1bb729397/volumes" Mar 20 13:44:06 crc kubenswrapper[4690]: I0320 13:44:06.451265 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Mar 20 13:44:06 crc kubenswrapper[4690]: W0320 13:44:06.458196 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb820f2a9_930b_44d0_a2c3_ad73e87c4ebb.slice/crio-6f26a6507492f6c594bbde50ffc7011016e40a433ee0b5007222081cb2cbf12b WatchSource:0}: Error finding container 6f26a6507492f6c594bbde50ffc7011016e40a433ee0b5007222081cb2cbf12b: Status 404 returned error can't find the container with id 6f26a6507492f6c594bbde50ffc7011016e40a433ee0b5007222081cb2cbf12b Mar 20 13:44:06 crc kubenswrapper[4690]: I0320 13:44:06.814635 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-587c585984-xs7nl" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Mar 20 13:44:07 crc kubenswrapper[4690]: I0320 13:44:07.299589 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb","Type":"ContainerStarted","Data":"8884cd70c0dbd1e1e3ac99d1ae1f2f56f9cb2e05c74e9832f67e9099cf06ac6b"} Mar 20 13:44:07 crc kubenswrapper[4690]: I0320 13:44:07.299630 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb","Type":"ContainerStarted","Data":"6f26a6507492f6c594bbde50ffc7011016e40a433ee0b5007222081cb2cbf12b"} Mar 20 13:44:08 crc kubenswrapper[4690]: I0320 13:44:08.278905 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Mar 20 13:44:08 crc kubenswrapper[4690]: I0320 13:44:08.312724 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/cinder-scheduler-0" event={"ID":"b820f2a9-930b-44d0-a2c3-ad73e87c4ebb","Type":"ContainerStarted","Data":"36c079a2c22cf338392ef5e862b9743f9b4cbb5eb5976362bdf60b418ba570bd"} Mar 20 13:44:10 crc kubenswrapper[4690]: I0320 13:44:10.153956 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7b6fc496fc-2z4sr" Mar 20 13:44:10 crc kubenswrapper[4690]: I0320 13:44:10.185680 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=5.185664646 podStartE2EDuration="5.185664646s" podCreationTimestamp="2026-03-20 13:44:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:44:08.356233486 +0000 UTC m=+1294.645833449" watchObservedRunningTime="2026-03-20 13:44:10.185664646 +0000 UTC m=+1296.475264589" Mar 20 13:44:10 crc kubenswrapper[4690]: I0320 13:44:10.945786 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.197131 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-58c55dcc8c-ddx5k"] Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.213192 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-58c55dcc8c-ddx5k"] Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.213330 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.215732 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.215957 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.217740 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.383404 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-public-tls-certs\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.383486 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rxkfc\" (UniqueName: \"kubernetes.io/projected/636ad0c0-e301-4d1b-8ad6-e4094424024f-kube-api-access-rxkfc\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.383519 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/636ad0c0-e301-4d1b-8ad6-e4094424024f-log-httpd\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.383667 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-combined-ca-bundle\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.383729 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/636ad0c0-e301-4d1b-8ad6-e4094424024f-run-httpd\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.383759 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-config-data\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.383879 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-internal-tls-certs\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.389208 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/636ad0c0-e301-4d1b-8ad6-e4094424024f-etc-swift\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.396965 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.397407 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="ceilometer-central-agent" containerID="cri-o://2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8" gracePeriod=30 Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.397461 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="proxy-httpd" containerID="cri-o://62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37" gracePeriod=30 Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.397567 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="ceilometer-notification-agent" containerID="cri-o://1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a" gracePeriod=30 Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.397701 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="sg-core" containerID="cri-o://e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7" gracePeriod=30 Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.407879 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" 
podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.172:3000/\": EOF" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.491336 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rxkfc\" (UniqueName: \"kubernetes.io/projected/636ad0c0-e301-4d1b-8ad6-e4094424024f-kube-api-access-rxkfc\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.491380 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/636ad0c0-e301-4d1b-8ad6-e4094424024f-log-httpd\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.491431 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-combined-ca-bundle\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.491463 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/636ad0c0-e301-4d1b-8ad6-e4094424024f-run-httpd\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.491482 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-config-data\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.491501 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-internal-tls-certs\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.491554 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/636ad0c0-e301-4d1b-8ad6-e4094424024f-etc-swift\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.491599 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-public-tls-certs\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.492365 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/636ad0c0-e301-4d1b-8ad6-e4094424024f-run-httpd\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: 
\"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.493071 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/636ad0c0-e301-4d1b-8ad6-e4094424024f-log-httpd\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.497413 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-internal-tls-certs\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.497761 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/636ad0c0-e301-4d1b-8ad6-e4094424024f-etc-swift\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.499322 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-combined-ca-bundle\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.499337 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-config-data\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.513703 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/636ad0c0-e301-4d1b-8ad6-e4094424024f-public-tls-certs\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.514976 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rxkfc\" (UniqueName: \"kubernetes.io/projected/636ad0c0-e301-4d1b-8ad6-e4094424024f-kube-api-access-rxkfc\") pod \"swift-proxy-58c55dcc8c-ddx5k\" (UID: \"636ad0c0-e301-4d1b-8ad6-e4094424024f\") " pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.534337 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.795008 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.803285 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.807051 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.807528 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.807813 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-m9b9g" Mar 20 13:44:14 crc kubenswrapper[4690]: I0320 13:44:14.815335 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.011820 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac9d04ea-6675-4512-8957-0b4d67157b15-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.011927 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ac9d04ea-6675-4512-8957-0b4d67157b15-openstack-config-secret\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.011982 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ac9d04ea-6675-4512-8957-0b4d67157b15-openstack-config\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.012046 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78scs\" (UniqueName: \"kubernetes.io/projected/ac9d04ea-6675-4512-8957-0b4d67157b15-kube-api-access-78scs\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.114249 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac9d04ea-6675-4512-8957-0b4d67157b15-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.114311 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ac9d04ea-6675-4512-8957-0b4d67157b15-openstack-config-secret\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.114366 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ac9d04ea-6675-4512-8957-0b4d67157b15-openstack-config\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.114434 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-78scs\" (UniqueName: \"kubernetes.io/projected/ac9d04ea-6675-4512-8957-0b4d67157b15-kube-api-access-78scs\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.115881 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ac9d04ea-6675-4512-8957-0b4d67157b15-openstack-config\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.121110 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ac9d04ea-6675-4512-8957-0b4d67157b15-openstack-config-secret\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.121197 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ac9d04ea-6675-4512-8957-0b4d67157b15-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.145614 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78scs\" (UniqueName: \"kubernetes.io/projected/ac9d04ea-6675-4512-8957-0b4d67157b15-kube-api-access-78scs\") pod \"openstackclient\" (UID: \"ac9d04ea-6675-4512-8957-0b4d67157b15\") " pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.187755 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.210593 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-58c55dcc8c-ddx5k"] Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.212565 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.214879 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data-custom\") pod \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.215588 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgrtt\" (UniqueName: \"kubernetes.io/projected/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-kube-api-access-kgrtt\") pod \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.215648 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data\") pod \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.215694 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-combined-ca-bundle\") pod \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.215843 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-logs\") pod \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\" (UID: \"f369e2b4-2fa6-42fb-b77e-869dcf0ad829\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.216512 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-logs" (OuterVolumeSpecName: "logs") pod "f369e2b4-2fa6-42fb-b77e-869dcf0ad829" (UID: "f369e2b4-2fa6-42fb-b77e-869dcf0ad829"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.219081 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "f369e2b4-2fa6-42fb-b77e-869dcf0ad829" (UID: "f369e2b4-2fa6-42fb-b77e-869dcf0ad829"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.220647 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-kube-api-access-kgrtt" (OuterVolumeSpecName: "kube-api-access-kgrtt") pod "f369e2b4-2fa6-42fb-b77e-869dcf0ad829" (UID: "f369e2b4-2fa6-42fb-b77e-869dcf0ad829"). InnerVolumeSpecName "kube-api-access-kgrtt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: W0320 13:44:15.290459 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod636ad0c0_e301_4d1b_8ad6_e4094424024f.slice/crio-a69d9ab6677d909a5b4f6cc71fd15d063f4a78acc2a797237b48824aa3a0808b WatchSource:0}: Error finding container a69d9ab6677d909a5b4f6cc71fd15d063f4a78acc2a797237b48824aa3a0808b: Status 404 returned error can't find the container with id a69d9ab6677d909a5b4f6cc71fd15d063f4a78acc2a797237b48824aa3a0808b Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.294863 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f369e2b4-2fa6-42fb-b77e-869dcf0ad829" (UID: "f369e2b4-2fa6-42fb-b77e-869dcf0ad829"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.320832 4690 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data-custom\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.321881 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgrtt\" (UniqueName: \"kubernetes.io/projected/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-kube-api-access-kgrtt\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.321974 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.322084 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.330275 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data" (OuterVolumeSpecName: "config-data") pod "f369e2b4-2fa6-42fb-b77e-869dcf0ad829" (UID: "f369e2b4-2fa6-42fb-b77e-869dcf0ad829"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.367477 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399174 4690 generic.go:334] "Generic (PLEG): container finished" podID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerID="62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37" exitCode=0 Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399202 4690 generic.go:334] "Generic (PLEG): container finished" podID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerID="e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7" exitCode=2 Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399209 4690 generic.go:334] "Generic (PLEG): container finished" podID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerID="1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a" exitCode=0 Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399217 4690 generic.go:334] "Generic (PLEG): container finished" podID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerID="2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8" exitCode=0 Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399252 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399263 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerDied","Data":"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37"} Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399284 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerDied","Data":"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7"} Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399294 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerDied","Data":"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a"} Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399303 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerDied","Data":"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8"} Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399311 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0675c9f3-f43f-4b81-841a-555b9d7152fd","Type":"ContainerDied","Data":"915ee46e3211699019c4d40892e9020474bc1a377e51f791985269826f16a776"} Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.399324 4690 scope.go:117] "RemoveContainer" containerID="62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.404458 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" event={"ID":"636ad0c0-e301-4d1b-8ad6-e4094424024f","Type":"ContainerStarted","Data":"a69d9ab6677d909a5b4f6cc71fd15d063f4a78acc2a797237b48824aa3a0808b"} Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.406563 4690 generic.go:334] "Generic (PLEG): container finished" podID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerID="8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836" exitCode=137 Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.406597 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" event={"ID":"f369e2b4-2fa6-42fb-b77e-869dcf0ad829","Type":"ContainerDied","Data":"8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836"} Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.406617 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" event={"ID":"f369e2b4-2fa6-42fb-b77e-869dcf0ad829","Type":"ContainerDied","Data":"70258873d05ff8a9d173fd7a961cd1b3ffcb278982c5c58dc6cbf23c0ddfc662"} Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.406667 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-d9f9c5f67-rdpbv" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.423142 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-sg-core-conf-yaml\") pod \"0675c9f3-f43f-4b81-841a-555b9d7152fd\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.423498 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-scripts\") pod \"0675c9f3-f43f-4b81-841a-555b9d7152fd\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.423546 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-config-data\") pod \"0675c9f3-f43f-4b81-841a-555b9d7152fd\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.423670 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-log-httpd\") pod \"0675c9f3-f43f-4b81-841a-555b9d7152fd\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.423689 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-combined-ca-bundle\") pod \"0675c9f3-f43f-4b81-841a-555b9d7152fd\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.423735 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-run-httpd\") pod \"0675c9f3-f43f-4b81-841a-555b9d7152fd\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.423837 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wh6bb\" (UniqueName: \"kubernetes.io/projected/0675c9f3-f43f-4b81-841a-555b9d7152fd-kube-api-access-wh6bb\") pod \"0675c9f3-f43f-4b81-841a-555b9d7152fd\" (UID: \"0675c9f3-f43f-4b81-841a-555b9d7152fd\") " Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.424398 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0675c9f3-f43f-4b81-841a-555b9d7152fd" (UID: "0675c9f3-f43f-4b81-841a-555b9d7152fd"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.424759 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f369e2b4-2fa6-42fb-b77e-869dcf0ad829-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.424765 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0675c9f3-f43f-4b81-841a-555b9d7152fd" (UID: "0675c9f3-f43f-4b81-841a-555b9d7152fd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.424775 4690 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.428001 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-scripts" (OuterVolumeSpecName: "scripts") pod "0675c9f3-f43f-4b81-841a-555b9d7152fd" (UID: "0675c9f3-f43f-4b81-841a-555b9d7152fd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.429388 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0675c9f3-f43f-4b81-841a-555b9d7152fd-kube-api-access-wh6bb" (OuterVolumeSpecName: "kube-api-access-wh6bb") pod "0675c9f3-f43f-4b81-841a-555b9d7152fd" (UID: "0675c9f3-f43f-4b81-841a-555b9d7152fd"). InnerVolumeSpecName "kube-api-access-wh6bb". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.449061 4690 scope.go:117] "RemoveContainer" containerID="e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.455376 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-worker-d9f9c5f67-rdpbv"] Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.462923 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-worker-d9f9c5f67-rdpbv"] Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.482240 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0675c9f3-f43f-4b81-841a-555b9d7152fd" (UID: "0675c9f3-f43f-4b81-841a-555b9d7152fd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.507468 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0675c9f3-f43f-4b81-841a-555b9d7152fd" (UID: "0675c9f3-f43f-4b81-841a-555b9d7152fd"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.539698 4690 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.539738 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.539748 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.539756 4690 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0675c9f3-f43f-4b81-841a-555b9d7152fd-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.539765 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wh6bb\" (UniqueName: \"kubernetes.io/projected/0675c9f3-f43f-4b81-841a-555b9d7152fd-kube-api-access-wh6bb\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.541873 4690 scope.go:117] "RemoveContainer" containerID="1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.554301 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-config-data" (OuterVolumeSpecName: "config-data") pod "0675c9f3-f43f-4b81-841a-555b9d7152fd" (UID: "0675c9f3-f43f-4b81-841a-555b9d7152fd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.632277 4690 scope.go:117] "RemoveContainer" containerID="2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.640788 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0675c9f3-f43f-4b81-841a-555b9d7152fd-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.656122 4690 scope.go:117] "RemoveContainer" containerID="62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.656560 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": container with ID starting with 62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37 not found: ID does not exist" containerID="62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.656588 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37"} err="failed to get container status \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": rpc error: code = NotFound desc = could not find container \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": container with ID starting with 62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.656611 4690 scope.go:117] "RemoveContainer" containerID="e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.656821 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": container with ID starting with e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7 not found: ID does not exist" containerID="e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.656862 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7"} err="failed to get container status \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": rpc error: code = NotFound desc = could not find container \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": container with ID starting with e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.656875 4690 scope.go:117] "RemoveContainer" containerID="1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.657275 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": container with ID starting with 1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a not found: ID does not exist" 
containerID="1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.657339 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a"} err="failed to get container status \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": rpc error: code = NotFound desc = could not find container \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": container with ID starting with 1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.657358 4690 scope.go:117] "RemoveContainer" containerID="2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.657592 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": container with ID starting with 2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8 not found: ID does not exist" containerID="2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.657617 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8"} err="failed to get container status \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": rpc error: code = NotFound desc = could not find container \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": container with ID starting with 2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.657632 4690 scope.go:117] "RemoveContainer" containerID="62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.657879 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37"} err="failed to get container status \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": rpc error: code = NotFound desc = could not find container \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": container with ID starting with 62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.657901 4690 scope.go:117] "RemoveContainer" containerID="e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.658347 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7"} err="failed to get container status \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": rpc error: code = NotFound desc = could not find container \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": container with ID starting with e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.658367 4690 scope.go:117] "RemoveContainer" 
containerID="1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.658689 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a"} err="failed to get container status \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": rpc error: code = NotFound desc = could not find container \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": container with ID starting with 1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.658711 4690 scope.go:117] "RemoveContainer" containerID="2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.659091 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8"} err="failed to get container status \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": rpc error: code = NotFound desc = could not find container \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": container with ID starting with 2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.659108 4690 scope.go:117] "RemoveContainer" containerID="62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.659368 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37"} err="failed to get container status \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": rpc error: code = NotFound desc = could not find container \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": container with ID starting with 62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.659390 4690 scope.go:117] "RemoveContainer" containerID="e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.659596 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7"} err="failed to get container status \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": rpc error: code = NotFound desc = could not find container \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": container with ID starting with e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.659615 4690 scope.go:117] "RemoveContainer" containerID="1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.660007 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a"} err="failed to get container status \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": rpc error: code = NotFound desc = could not find 
container \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": container with ID starting with 1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.660049 4690 scope.go:117] "RemoveContainer" containerID="2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.660435 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8"} err="failed to get container status \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": rpc error: code = NotFound desc = could not find container \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": container with ID starting with 2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.660456 4690 scope.go:117] "RemoveContainer" containerID="62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.660668 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37"} err="failed to get container status \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": rpc error: code = NotFound desc = could not find container \"62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37\": container with ID starting with 62a2c711a83a7858d44246e4519b1009a9a9fce20e14f6f40027e92228b58b37 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.660687 4690 scope.go:117] "RemoveContainer" containerID="e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.661021 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7"} err="failed to get container status \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": rpc error: code = NotFound desc = could not find container \"e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7\": container with ID starting with e563cadf0d317a834532ccfe3cf358778a6051ee0faa0c136603285b487ce1f7 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.661044 4690 scope.go:117] "RemoveContainer" containerID="1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.661271 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a"} err="failed to get container status \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": rpc error: code = NotFound desc = could not find container \"1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a\": container with ID starting with 1c02bfb326288554a716431d1255569ecedf1033a485b682c2e12d58557b202a not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.661291 4690 scope.go:117] "RemoveContainer" containerID="2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.661492 4690 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8"} err="failed to get container status \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": rpc error: code = NotFound desc = could not find container \"2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8\": container with ID starting with 2282a7209657f403c17119d3f53b9e0ebde4b74331f8bf126642f0fbacfd4ed8 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.661509 4690 scope.go:117] "RemoveContainer" containerID="8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.679880 4690 scope.go:117] "RemoveContainer" containerID="87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.699565 4690 scope.go:117] "RemoveContainer" containerID="8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.700131 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836\": container with ID starting with 8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836 not found: ID does not exist" containerID="8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.700189 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836"} err="failed to get container status \"8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836\": rpc error: code = NotFound desc = could not find container \"8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836\": container with ID starting with 8e04b9d92c22e0d75fc381ae1a9d00cd12b16dd3c61bd59b7cb5290890313836 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.700233 4690 scope.go:117] "RemoveContainer" containerID="87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.700540 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32\": container with ID starting with 87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32 not found: ID does not exist" containerID="87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.700566 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32"} err="failed to get container status \"87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32\": rpc error: code = NotFound desc = could not find container \"87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32\": container with ID starting with 87ff7c94e149e0f47f56e6a85a8f47a5316e943f03faa948338ea0d010d22d32 not found: ID does not exist" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.743441 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 
13:44:15.758821 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.773173 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.781958 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.782408 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="ceilometer-notification-agent" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782434 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="ceilometer-notification-agent" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.782478 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="ceilometer-central-agent" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782490 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="ceilometer-central-agent" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.782509 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerName="barbican-worker-log" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782518 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerName="barbican-worker-log" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.782543 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerName="barbican-worker" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782552 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerName="barbican-worker" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.782566 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="proxy-httpd" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782573 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="proxy-httpd" Mar 20 13:44:15 crc kubenswrapper[4690]: E0320 13:44:15.782609 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="sg-core" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782618 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="sg-core" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782811 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="ceilometer-notification-agent" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782824 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="ceilometer-central-agent" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782840 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerName="barbican-worker-log" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782872 4690 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" containerName="barbican-worker" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782883 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="proxy-httpd" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.782904 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" containerName="sg-core" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.784742 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.786327 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.827360 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.827581 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.853072 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.853114 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zzxf\" (UniqueName: \"kubernetes.io/projected/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-kube-api-access-4zzxf\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.853133 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-run-httpd\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.853162 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-scripts\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.853288 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.853413 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-log-httpd\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.853992 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-config-data\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.956107 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.956484 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zzxf\" (UniqueName: \"kubernetes.io/projected/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-kube-api-access-4zzxf\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.956511 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-run-httpd\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.956546 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-scripts\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.956577 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.956601 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-log-httpd\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.956648 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-config-data\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.959591 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-run-httpd\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.959829 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-log-httpd\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.966567 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.966693 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.967094 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-scripts\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.967183 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-config-data\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:15 crc kubenswrapper[4690]: I0320 13:44:15.978604 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zzxf\" (UniqueName: \"kubernetes.io/projected/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-kube-api-access-4zzxf\") pod \"ceilometer-0\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " pod="openstack/ceilometer-0" Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.136513 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.206893 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.427698 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0675c9f3-f43f-4b81-841a-555b9d7152fd" path="/var/lib/kubelet/pods/0675c9f3-f43f-4b81-841a-555b9d7152fd/volumes" Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.429002 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f369e2b4-2fa6-42fb-b77e-869dcf0ad829" path="/var/lib/kubelet/pods/f369e2b4-2fa6-42fb-b77e-869dcf0ad829/volumes" Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.429603 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ac9d04ea-6675-4512-8957-0b4d67157b15","Type":"ContainerStarted","Data":"db52f811c4d64bebd1767ef569ada27352f14f55c159e96b5a402deb1fb2d9f1"} Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.433384 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" event={"ID":"636ad0c0-e301-4d1b-8ad6-e4094424024f","Type":"ContainerStarted","Data":"e7e5dcfa68b958c841ac6b05bad359bd5054b8357d5b973b9afc095a2a789ea0"} Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.433437 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" event={"ID":"636ad0c0-e301-4d1b-8ad6-e4094424024f","Type":"ContainerStarted","Data":"7bdcf46530db0a23fd0828cf2ab248ec4740acab543667116dfdf8c6966063cc"} Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.434058 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.434101 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.460157 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" podStartSLOduration=2.46013345 podStartE2EDuration="2.46013345s" podCreationTimestamp="2026-03-20 13:44:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:44:16.451525075 +0000 UTC m=+1302.741125028" watchObservedRunningTime="2026-03-20 13:44:16.46013345 +0000 UTC m=+1302.749733393" Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.657286 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:16 crc kubenswrapper[4690]: I0320 13:44:16.815625 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-587c585984-xs7nl" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Mar 20 13:44:17 crc kubenswrapper[4690]: I0320 13:44:17.478277 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerStarted","Data":"31644709fda99b597e211adc8e6a32ce863fc72dfd69ce8b9100275f654b4d82"} Mar 20 13:44:18 crc kubenswrapper[4690]: I0320 13:44:18.499662 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerStarted","Data":"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447"} Mar 20 
13:44:19 crc kubenswrapper[4690]: I0320 13:44:19.512890 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerStarted","Data":"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4"} Mar 20 13:44:19 crc kubenswrapper[4690]: I0320 13:44:19.513288 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerStarted","Data":"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0"} Mar 20 13:44:23 crc kubenswrapper[4690]: I0320 13:44:23.452745 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:24 crc kubenswrapper[4690]: I0320 13:44:24.543368 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:24 crc kubenswrapper[4690]: I0320 13:44:24.545998 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-58c55dcc8c-ddx5k" Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.585373 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerStarted","Data":"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338"} Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.585835 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.585523 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="sg-core" containerID="cri-o://55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4" gracePeriod=30 Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.585468 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="ceilometer-central-agent" containerID="cri-o://e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447" gracePeriod=30 Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.585533 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="ceilometer-notification-agent" containerID="cri-o://c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0" gracePeriod=30 Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.585534 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="proxy-httpd" containerID="cri-o://75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338" gracePeriod=30 Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.587617 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ac9d04ea-6675-4512-8957-0b4d67157b15","Type":"ContainerStarted","Data":"17bd739832b1c1218e04ea65f672e5b7977ecafa9f5bac8c06122165342d8ff6"} Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.609183 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.917149969 podStartE2EDuration="11.60916625s" podCreationTimestamp="2026-03-20 13:44:15 +0000 UTC" 
firstStartedPulling="2026-03-20 13:44:16.675320308 +0000 UTC m=+1302.964920251" lastFinishedPulling="2026-03-20 13:44:25.367336599 +0000 UTC m=+1311.656936532" observedRunningTime="2026-03-20 13:44:26.604735433 +0000 UTC m=+1312.894335376" watchObservedRunningTime="2026-03-20 13:44:26.60916625 +0000 UTC m=+1312.898766193" Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.623716 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=3.02630055 podStartE2EDuration="12.623693784s" podCreationTimestamp="2026-03-20 13:44:14 +0000 UTC" firstStartedPulling="2026-03-20 13:44:15.772785826 +0000 UTC m=+1302.062385769" lastFinishedPulling="2026-03-20 13:44:25.37017906 +0000 UTC m=+1311.659779003" observedRunningTime="2026-03-20 13:44:26.618000322 +0000 UTC m=+1312.907600265" watchObservedRunningTime="2026-03-20 13:44:26.623693784 +0000 UTC m=+1312.913293727" Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.736790 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-64dbdc6bf-bqlx5" Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.806976 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-65b6bf5884-9kvqh"] Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.807446 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-65b6bf5884-9kvqh" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" containerName="neutron-api" containerID="cri-o://1ee497351d53e8a9f9a8191685b5badaba8117a7b558f7bb5ab2d0b931252a50" gracePeriod=30 Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.807596 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-65b6bf5884-9kvqh" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" containerName="neutron-httpd" containerID="cri-o://beccd301a63fd5b4871cde64deb7972cdfb711b02983ef69bd12f554c35d77ad" gracePeriod=30 Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.815506 4690 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-587c585984-xs7nl" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.152:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.152:8443: connect: connection refused" Mar 20 13:44:26 crc kubenswrapper[4690]: I0320 13:44:26.815608 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.547620 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.596891 4690 generic.go:334] "Generic (PLEG): container finished" podID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerID="75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338" exitCode=0 Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.596918 4690 generic.go:334] "Generic (PLEG): container finished" podID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerID="55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4" exitCode=2 Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.596945 4690 generic.go:334] "Generic (PLEG): container finished" podID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerID="c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0" exitCode=0 Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.596953 4690 generic.go:334] "Generic (PLEG): container finished" podID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerID="e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447" exitCode=0 Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.596951 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerDied","Data":"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338"} Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.596986 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerDied","Data":"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4"} Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.596989 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.597021 4690 scope.go:117] "RemoveContainer" containerID="75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.596999 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerDied","Data":"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0"} Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.597080 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerDied","Data":"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447"} Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.597090 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc","Type":"ContainerDied","Data":"31644709fda99b597e211adc8e6a32ce863fc72dfd69ce8b9100275f654b4d82"} Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.598982 4690 generic.go:334] "Generic (PLEG): container finished" podID="70037527-dc76-4c31-9841-6cc6b27fe032" containerID="beccd301a63fd5b4871cde64deb7972cdfb711b02983ef69bd12f554c35d77ad" exitCode=0 Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.599040 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65b6bf5884-9kvqh" event={"ID":"70037527-dc76-4c31-9841-6cc6b27fe032","Type":"ContainerDied","Data":"beccd301a63fd5b4871cde64deb7972cdfb711b02983ef69bd12f554c35d77ad"} Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.620465 4690 scope.go:117] "RemoveContainer" containerID="55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.658152 4690 scope.go:117] "RemoveContainer" containerID="c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.683797 4690 scope.go:117] "RemoveContainer" containerID="e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.689780 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4zzxf\" (UniqueName: \"kubernetes.io/projected/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-kube-api-access-4zzxf\") pod \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.689877 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-scripts\") pod \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.689989 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-run-httpd\") pod \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.690023 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-sg-core-conf-yaml\") pod 
\"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.690061 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-config-data\") pod \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.690117 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-log-httpd\") pod \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.690170 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-combined-ca-bundle\") pod \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\" (UID: \"25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc\") " Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.691265 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" (UID: "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.691277 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" (UID: "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.698079 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-scripts" (OuterVolumeSpecName: "scripts") pod "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" (UID: "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.705031 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-kube-api-access-4zzxf" (OuterVolumeSpecName: "kube-api-access-4zzxf") pod "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" (UID: "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc"). InnerVolumeSpecName "kube-api-access-4zzxf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.717746 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" (UID: "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.777816 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" (UID: "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.792570 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.792602 4690 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.792614 4690 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.792631 4690 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.792642 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.792654 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4zzxf\" (UniqueName: \"kubernetes.io/projected/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-kube-api-access-4zzxf\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.794930 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-config-data" (OuterVolumeSpecName: "config-data") pod "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" (UID: "25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.884138 4690 scope.go:117] "RemoveContainer" containerID="75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338" Mar 20 13:44:27 crc kubenswrapper[4690]: E0320 13:44:27.885478 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": container with ID starting with 75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338 not found: ID does not exist" containerID="75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.885525 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338"} err="failed to get container status \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": rpc error: code = NotFound desc = could not find container \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": container with ID starting with 75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.885552 4690 scope.go:117] "RemoveContainer" containerID="55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4" Mar 20 13:44:27 crc kubenswrapper[4690]: E0320 13:44:27.886692 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": container with ID starting with 55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4 not found: ID does not exist" containerID="55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.886830 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4"} err="failed to get container status \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": rpc error: code = NotFound desc = could not find container \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": container with ID starting with 55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.886965 4690 scope.go:117] "RemoveContainer" containerID="c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0" Mar 20 13:44:27 crc kubenswrapper[4690]: E0320 13:44:27.887441 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": container with ID starting with c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0 not found: ID does not exist" containerID="c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.887465 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0"} err="failed to get container status \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": rpc error: code = NotFound desc = could not 
find container \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": container with ID starting with c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.887479 4690 scope.go:117] "RemoveContainer" containerID="e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447" Mar 20 13:44:27 crc kubenswrapper[4690]: E0320 13:44:27.888073 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": container with ID starting with e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447 not found: ID does not exist" containerID="e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.888202 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447"} err="failed to get container status \"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": rpc error: code = NotFound desc = could not find container \"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": container with ID starting with e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.888316 4690 scope.go:117] "RemoveContainer" containerID="75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.889658 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338"} err="failed to get container status \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": rpc error: code = NotFound desc = could not find container \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": container with ID starting with 75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.889685 4690 scope.go:117] "RemoveContainer" containerID="55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.890170 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4"} err="failed to get container status \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": rpc error: code = NotFound desc = could not find container \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": container with ID starting with 55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.890190 4690 scope.go:117] "RemoveContainer" containerID="c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.890413 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0"} err="failed to get container status \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": rpc error: code = NotFound desc = could not 
find container \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": container with ID starting with c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.890434 4690 scope.go:117] "RemoveContainer" containerID="e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.890656 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447"} err="failed to get container status \"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": rpc error: code = NotFound desc = could not find container \"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": container with ID starting with e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.890673 4690 scope.go:117] "RemoveContainer" containerID="75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.890881 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338"} err="failed to get container status \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": rpc error: code = NotFound desc = could not find container \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": container with ID starting with 75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.890905 4690 scope.go:117] "RemoveContainer" containerID="55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.893462 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4"} err="failed to get container status \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": rpc error: code = NotFound desc = could not find container \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": container with ID starting with 55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.893519 4690 scope.go:117] "RemoveContainer" containerID="c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.893955 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0"} err="failed to get container status \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": rpc error: code = NotFound desc = could not find container \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": container with ID starting with c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.893977 4690 scope.go:117] "RemoveContainer" containerID="e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.894229 4690 
reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.894244 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447"} err="failed to get container status \"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": rpc error: code = NotFound desc = could not find container \"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": container with ID starting with e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.894274 4690 scope.go:117] "RemoveContainer" containerID="75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.895208 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338"} err="failed to get container status \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": rpc error: code = NotFound desc = could not find container \"75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338\": container with ID starting with 75d17a6adb3239b4fd2c85a0ff7c8a65899d0ed0239d5938438d544721878338 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.895246 4690 scope.go:117] "RemoveContainer" containerID="55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.895631 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4"} err="failed to get container status \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": rpc error: code = NotFound desc = could not find container \"55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4\": container with ID starting with 55b6f61bbc8f1d620ff4e569d11042c92f2e256b025b781deda66a532c87d6c4 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.895659 4690 scope.go:117] "RemoveContainer" containerID="c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.895958 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0"} err="failed to get container status \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": rpc error: code = NotFound desc = could not find container \"c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0\": container with ID starting with c1d7f37b94296bdb6feb9581add34c92aa4a918f24b9c9ff09cd834f7cb302a0 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.895993 4690 scope.go:117] "RemoveContainer" containerID="e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.896213 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447"} err="failed to get container status 
\"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": rpc error: code = NotFound desc = could not find container \"e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447\": container with ID starting with e127231524d1976c1258e99d7c8d493e93a0a1fd2aaaa4a2b38993457c961447 not found: ID does not exist" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.932641 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.940811 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.963478 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:27 crc kubenswrapper[4690]: E0320 13:44:27.963868 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="proxy-httpd" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.963885 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="proxy-httpd" Mar 20 13:44:27 crc kubenswrapper[4690]: E0320 13:44:27.963898 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="sg-core" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.963905 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="sg-core" Mar 20 13:44:27 crc kubenswrapper[4690]: E0320 13:44:27.963921 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="ceilometer-central-agent" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.963927 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="ceilometer-central-agent" Mar 20 13:44:27 crc kubenswrapper[4690]: E0320 13:44:27.963939 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="ceilometer-notification-agent" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.963945 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="ceilometer-notification-agent" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.964112 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="ceilometer-central-agent" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.964125 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="proxy-httpd" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.964135 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="sg-core" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.964148 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" containerName="ceilometer-notification-agent" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.965698 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.969257 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.969493 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:44:27 crc kubenswrapper[4690]: I0320 13:44:27.981883 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.097350 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-config-data\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.097426 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-scripts\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.097493 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-run-httpd\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.097564 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-log-httpd\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.097661 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.097723 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x29bn\" (UniqueName: \"kubernetes.io/projected/136193db-e17b-463c-b364-6dbd1d765d48-kube-api-access-x29bn\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.097761 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.199568 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-config-data\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.199646 
4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-scripts\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.199689 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-run-httpd\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.199743 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-log-httpd\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.199906 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.199936 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x29bn\" (UniqueName: \"kubernetes.io/projected/136193db-e17b-463c-b364-6dbd1d765d48-kube-api-access-x29bn\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.199968 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.200659 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-log-httpd\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.200741 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-run-httpd\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.204239 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-config-data\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.204939 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-scripts\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.206702 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.208109 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.220106 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x29bn\" (UniqueName: \"kubernetes.io/projected/136193db-e17b-463c-b364-6dbd1d765d48-kube-api-access-x29bn\") pod \"ceilometer-0\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.291711 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.461667 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc" path="/var/lib/kubelet/pods/25e12b07-eb86-4ff1-95c9-4fc9fe32f3cc/volumes" Mar 20 13:44:28 crc kubenswrapper[4690]: I0320 13:44:28.912475 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.033288 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-x7tz6"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.034509 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.048025 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-x7tz6"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.091322 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.130151 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-tqb96"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.131468 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.139342 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-tqb96"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.219387 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-operator-scripts\") pod \"nova-api-db-create-x7tz6\" (UID: \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\") " pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.219563 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tvff2\" (UniqueName: \"kubernetes.io/projected/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-kube-api-access-tvff2\") pod \"nova-api-db-create-x7tz6\" (UID: \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\") " pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.232200 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-619b-account-create-update-vld5x"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.233311 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.236752 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.244117 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-619b-account-create-update-vld5x"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.321721 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ffaa372-aeed-471d-b5ba-f7692e1daad8-operator-scripts\") pod \"nova-cell0-db-create-tqb96\" (UID: \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\") " pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.321823 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-operator-scripts\") pod \"nova-api-db-create-x7tz6\" (UID: \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\") " pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.321904 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wr6w\" (UniqueName: \"kubernetes.io/projected/1ffaa372-aeed-471d-b5ba-f7692e1daad8-kube-api-access-4wr6w\") pod \"nova-cell0-db-create-tqb96\" (UID: \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\") " pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.321990 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tvff2\" (UniqueName: \"kubernetes.io/projected/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-kube-api-access-tvff2\") pod \"nova-api-db-create-x7tz6\" (UID: \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\") " pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.322616 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-operator-scripts\") pod \"nova-api-db-create-x7tz6\" (UID: \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\") " pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.337698 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-p7w2k"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.339358 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.343571 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tvff2\" (UniqueName: \"kubernetes.io/projected/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-kube-api-access-tvff2\") pod \"nova-api-db-create-x7tz6\" (UID: \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\") " pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.348621 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-p7w2k"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.360627 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.425923 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ffaa372-aeed-471d-b5ba-f7692e1daad8-operator-scripts\") pod \"nova-cell0-db-create-tqb96\" (UID: \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\") " pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.425992 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-867zz\" (UniqueName: \"kubernetes.io/projected/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-kube-api-access-867zz\") pod \"nova-api-619b-account-create-update-vld5x\" (UID: \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\") " pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.426019 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-operator-scripts\") pod \"nova-api-619b-account-create-update-vld5x\" (UID: \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\") " pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.426053 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wr6w\" (UniqueName: \"kubernetes.io/projected/1ffaa372-aeed-471d-b5ba-f7692e1daad8-kube-api-access-4wr6w\") pod \"nova-cell0-db-create-tqb96\" (UID: \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\") " pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.426915 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ffaa372-aeed-471d-b5ba-f7692e1daad8-operator-scripts\") pod \"nova-cell0-db-create-tqb96\" (UID: \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\") " pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.443330 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-9b74-account-create-update-qrqkx"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 
13:44:29.444563 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.445863 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.446194 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wr6w\" (UniqueName: \"kubernetes.io/projected/1ffaa372-aeed-471d-b5ba-f7692e1daad8-kube-api-access-4wr6w\") pod \"nova-cell0-db-create-tqb96\" (UID: \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\") " pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.451336 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.471897 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-9b74-account-create-update-qrqkx"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.527728 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7338a418-a221-409b-bafd-666e7cc66a8e-operator-scripts\") pod \"nova-cell1-db-create-p7w2k\" (UID: \"7338a418-a221-409b-bafd-666e7cc66a8e\") " pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.528143 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-867zz\" (UniqueName: \"kubernetes.io/projected/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-kube-api-access-867zz\") pod \"nova-api-619b-account-create-update-vld5x\" (UID: \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\") " pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.528192 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-operator-scripts\") pod \"nova-api-619b-account-create-update-vld5x\" (UID: \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\") " pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.529300 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wbplv\" (UniqueName: \"kubernetes.io/projected/7338a418-a221-409b-bafd-666e7cc66a8e-kube-api-access-wbplv\") pod \"nova-cell1-db-create-p7w2k\" (UID: \"7338a418-a221-409b-bafd-666e7cc66a8e\") " pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.530086 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-operator-scripts\") pod \"nova-api-619b-account-create-update-vld5x\" (UID: \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\") " pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.559565 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-867zz\" (UniqueName: \"kubernetes.io/projected/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-kube-api-access-867zz\") pod \"nova-api-619b-account-create-update-vld5x\" (UID: \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\") " 
pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.625024 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerStarted","Data":"19480b51f53d89c176bcef3857b014a90d92d08a4ed08ddc6e8c989e1537f715"} Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.631243 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wbplv\" (UniqueName: \"kubernetes.io/projected/7338a418-a221-409b-bafd-666e7cc66a8e-kube-api-access-wbplv\") pod \"nova-cell1-db-create-p7w2k\" (UID: \"7338a418-a221-409b-bafd-666e7cc66a8e\") " pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.631313 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9m7r\" (UniqueName: \"kubernetes.io/projected/817d49d2-79e2-42f0-b503-bd6bf78f1459-kube-api-access-l9m7r\") pod \"nova-cell0-9b74-account-create-update-qrqkx\" (UID: \"817d49d2-79e2-42f0-b503-bd6bf78f1459\") " pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.631363 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/817d49d2-79e2-42f0-b503-bd6bf78f1459-operator-scripts\") pod \"nova-cell0-9b74-account-create-update-qrqkx\" (UID: \"817d49d2-79e2-42f0-b503-bd6bf78f1459\") " pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.631392 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7338a418-a221-409b-bafd-666e7cc66a8e-operator-scripts\") pod \"nova-cell1-db-create-p7w2k\" (UID: \"7338a418-a221-409b-bafd-666e7cc66a8e\") " pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.632298 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7338a418-a221-409b-bafd-666e7cc66a8e-operator-scripts\") pod \"nova-cell1-db-create-p7w2k\" (UID: \"7338a418-a221-409b-bafd-666e7cc66a8e\") " pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.654410 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wbplv\" (UniqueName: \"kubernetes.io/projected/7338a418-a221-409b-bafd-666e7cc66a8e-kube-api-access-wbplv\") pod \"nova-cell1-db-create-p7w2k\" (UID: \"7338a418-a221-409b-bafd-666e7cc66a8e\") " pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.664071 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-1ed3-account-create-update-xqkdf"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.665310 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.668330 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.684197 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1ed3-account-create-update-xqkdf"] Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.733208 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9m7r\" (UniqueName: \"kubernetes.io/projected/817d49d2-79e2-42f0-b503-bd6bf78f1459-kube-api-access-l9m7r\") pod \"nova-cell0-9b74-account-create-update-qrqkx\" (UID: \"817d49d2-79e2-42f0-b503-bd6bf78f1459\") " pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.733641 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/817d49d2-79e2-42f0-b503-bd6bf78f1459-operator-scripts\") pod \"nova-cell0-9b74-account-create-update-qrqkx\" (UID: \"817d49d2-79e2-42f0-b503-bd6bf78f1459\") " pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.734435 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/817d49d2-79e2-42f0-b503-bd6bf78f1459-operator-scripts\") pod \"nova-cell0-9b74-account-create-update-qrqkx\" (UID: \"817d49d2-79e2-42f0-b503-bd6bf78f1459\") " pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.756088 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9m7r\" (UniqueName: \"kubernetes.io/projected/817d49d2-79e2-42f0-b503-bd6bf78f1459-kube-api-access-l9m7r\") pod \"nova-cell0-9b74-account-create-update-qrqkx\" (UID: \"817d49d2-79e2-42f0-b503-bd6bf78f1459\") " pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.797393 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.804378 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.835770 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x5t2w\" (UniqueName: \"kubernetes.io/projected/b58df85c-1bf7-41ba-9839-d74172783a24-kube-api-access-x5t2w\") pod \"nova-cell1-1ed3-account-create-update-xqkdf\" (UID: \"b58df85c-1bf7-41ba-9839-d74172783a24\") " pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.835860 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b58df85c-1bf7-41ba-9839-d74172783a24-operator-scripts\") pod \"nova-cell1-1ed3-account-create-update-xqkdf\" (UID: \"b58df85c-1bf7-41ba-9839-d74172783a24\") " pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.847161 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.937520 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x5t2w\" (UniqueName: \"kubernetes.io/projected/b58df85c-1bf7-41ba-9839-d74172783a24-kube-api-access-x5t2w\") pod \"nova-cell1-1ed3-account-create-update-xqkdf\" (UID: \"b58df85c-1bf7-41ba-9839-d74172783a24\") " pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.937650 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b58df85c-1bf7-41ba-9839-d74172783a24-operator-scripts\") pod \"nova-cell1-1ed3-account-create-update-xqkdf\" (UID: \"b58df85c-1bf7-41ba-9839-d74172783a24\") " pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.938726 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b58df85c-1bf7-41ba-9839-d74172783a24-operator-scripts\") pod \"nova-cell1-1ed3-account-create-update-xqkdf\" (UID: \"b58df85c-1bf7-41ba-9839-d74172783a24\") " pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.962953 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x5t2w\" (UniqueName: \"kubernetes.io/projected/b58df85c-1bf7-41ba-9839-d74172783a24-kube-api-access-x5t2w\") pod \"nova-cell1-1ed3-account-create-update-xqkdf\" (UID: \"b58df85c-1bf7-41ba-9839-d74172783a24\") " pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:29 crc kubenswrapper[4690]: I0320 13:44:29.976260 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-x7tz6"] Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.051204 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.190495 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-tqb96"] Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.444103 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-619b-account-create-update-vld5x"] Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.464773 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-9b74-account-create-update-qrqkx"] Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.580137 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-p7w2k"] Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.653873 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tqb96" event={"ID":"1ffaa372-aeed-471d-b5ba-f7692e1daad8","Type":"ContainerStarted","Data":"b7a94a9c4e2cbee80df5b492c305daaf930a8bb205ac42b2a2d81272b77cf189"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.653916 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tqb96" event={"ID":"1ffaa372-aeed-471d-b5ba-f7692e1daad8","Type":"ContainerStarted","Data":"6538b6183bb6f2e825894eb9e949d64644c6ff690000d17c1828ab71fa6596a1"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.656951 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-619b-account-create-update-vld5x" event={"ID":"d7435c23-ad0c-484c-bc24-5cceb7e01ccc","Type":"ContainerStarted","Data":"7f56fc60c2c118fb736e4a5488750895ea49880b7cf0d31785dfb6032ab4aee3"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.662344 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerStarted","Data":"982ed374248ba889066c5e194078a786b945ba0132066b6c7fab8d3e33cbf90a"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.662391 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerStarted","Data":"e148622b819b73225d7d154a589a27e4b76a6302f87c12d1e92a4f6019a8dcd2"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.663829 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" event={"ID":"817d49d2-79e2-42f0-b503-bd6bf78f1459","Type":"ContainerStarted","Data":"d4f14a2b39e9e33acac06d50c6460ecebb0025354fb5ea99f68f1b0435ee5028"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.664972 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p7w2k" event={"ID":"7338a418-a221-409b-bafd-666e7cc66a8e","Type":"ContainerStarted","Data":"fcd678b003ed1f9e45805db9c3192c7afb8fee673903937ac922052d893f2e95"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.682367 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-x7tz6" event={"ID":"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b","Type":"ContainerStarted","Data":"f6f68c193d54c40052e78503db7d7888563cd274fa14af6f0f4e7ee67f2589e2"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.682372 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-tqb96" podStartSLOduration=1.682357467 podStartE2EDuration="1.682357467s" podCreationTimestamp="2026-03-20 
13:44:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:44:30.676165431 +0000 UTC m=+1316.965765374" watchObservedRunningTime="2026-03-20 13:44:30.682357467 +0000 UTC m=+1316.971957410" Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.682413 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-x7tz6" event={"ID":"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b","Type":"ContainerStarted","Data":"e7dd6e36045446a01dc5f2357a8a94f53a9f10e839bb639b9d2ef2afc7847ecd"} Mar 20 13:44:30 crc kubenswrapper[4690]: I0320 13:44:30.720539 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-1ed3-account-create-update-xqkdf"] Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.693672 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerStarted","Data":"a99091e52d2a4ec8c112e902e034866fac1e53ef7cb36314e65c54a3b60b5b98"} Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.697150 4690 generic.go:334] "Generic (PLEG): container finished" podID="817d49d2-79e2-42f0-b503-bd6bf78f1459" containerID="94cdd72e96f87b4b2ec8d1c9b4e7ef5245d3914543d084cb1717fc341d41f9af" exitCode=0 Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.697206 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" event={"ID":"817d49d2-79e2-42f0-b503-bd6bf78f1459","Type":"ContainerDied","Data":"94cdd72e96f87b4b2ec8d1c9b4e7ef5245d3914543d084cb1717fc341d41f9af"} Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.706549 4690 generic.go:334] "Generic (PLEG): container finished" podID="7338a418-a221-409b-bafd-666e7cc66a8e" containerID="3aa218be34916d51eb763aaf84856395e4fbc132ed761cd58796e55dbefd4b25" exitCode=0 Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.706647 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p7w2k" event={"ID":"7338a418-a221-409b-bafd-666e7cc66a8e","Type":"ContainerDied","Data":"3aa218be34916d51eb763aaf84856395e4fbc132ed761cd58796e55dbefd4b25"} Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.708238 4690 generic.go:334] "Generic (PLEG): container finished" podID="a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b" containerID="f6f68c193d54c40052e78503db7d7888563cd274fa14af6f0f4e7ee67f2589e2" exitCode=0 Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.708305 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-x7tz6" event={"ID":"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b","Type":"ContainerDied","Data":"f6f68c193d54c40052e78503db7d7888563cd274fa14af6f0f4e7ee67f2589e2"} Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.713475 4690 generic.go:334] "Generic (PLEG): container finished" podID="70037527-dc76-4c31-9841-6cc6b27fe032" containerID="1ee497351d53e8a9f9a8191685b5badaba8117a7b558f7bb5ab2d0b931252a50" exitCode=0 Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.713534 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65b6bf5884-9kvqh" event={"ID":"70037527-dc76-4c31-9841-6cc6b27fe032","Type":"ContainerDied","Data":"1ee497351d53e8a9f9a8191685b5badaba8117a7b558f7bb5ab2d0b931252a50"} Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.717249 4690 generic.go:334] "Generic (PLEG): container finished" podID="1ffaa372-aeed-471d-b5ba-f7692e1daad8" 
containerID="b7a94a9c4e2cbee80df5b492c305daaf930a8bb205ac42b2a2d81272b77cf189" exitCode=0 Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.717468 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tqb96" event={"ID":"1ffaa372-aeed-471d-b5ba-f7692e1daad8","Type":"ContainerDied","Data":"b7a94a9c4e2cbee80df5b492c305daaf930a8bb205ac42b2a2d81272b77cf189"} Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.723538 4690 generic.go:334] "Generic (PLEG): container finished" podID="b58df85c-1bf7-41ba-9839-d74172783a24" containerID="788a6967c75f9832550ea8889692f14c85f612ff484fec3a904580e18e06beb1" exitCode=0 Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.723612 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" event={"ID":"b58df85c-1bf7-41ba-9839-d74172783a24","Type":"ContainerDied","Data":"788a6967c75f9832550ea8889692f14c85f612ff484fec3a904580e18e06beb1"} Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.723645 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" event={"ID":"b58df85c-1bf7-41ba-9839-d74172783a24","Type":"ContainerStarted","Data":"fb9b7ca3008e651ae8b2c3e7fb4b0b6c9eefed914e7ce893358bfef90e6116d5"} Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.742569 4690 generic.go:334] "Generic (PLEG): container finished" podID="d7435c23-ad0c-484c-bc24-5cceb7e01ccc" containerID="40641d2f53160702c35271b9e674c92dc2503c78579cb86299d72e4cbc4b112d" exitCode=0 Mar 20 13:44:31 crc kubenswrapper[4690]: I0320 13:44:31.742689 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-619b-account-create-update-vld5x" event={"ID":"d7435c23-ad0c-484c-bc24-5cceb7e01ccc","Type":"ContainerDied","Data":"40641d2f53160702c35271b9e674c92dc2503c78579cb86299d72e4cbc4b112d"} Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.026516 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.117295 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-httpd-config\") pod \"70037527-dc76-4c31-9841-6cc6b27fe032\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.117457 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-config\") pod \"70037527-dc76-4c31-9841-6cc6b27fe032\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.117493 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrt2d\" (UniqueName: \"kubernetes.io/projected/70037527-dc76-4c31-9841-6cc6b27fe032-kube-api-access-zrt2d\") pod \"70037527-dc76-4c31-9841-6cc6b27fe032\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.117534 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-ovndb-tls-certs\") pod \"70037527-dc76-4c31-9841-6cc6b27fe032\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.117576 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-combined-ca-bundle\") pod \"70037527-dc76-4c31-9841-6cc6b27fe032\" (UID: \"70037527-dc76-4c31-9841-6cc6b27fe032\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.127041 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "70037527-dc76-4c31-9841-6cc6b27fe032" (UID: "70037527-dc76-4c31-9841-6cc6b27fe032"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.146985 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70037527-dc76-4c31-9841-6cc6b27fe032-kube-api-access-zrt2d" (OuterVolumeSpecName: "kube-api-access-zrt2d") pod "70037527-dc76-4c31-9841-6cc6b27fe032" (UID: "70037527-dc76-4c31-9841-6cc6b27fe032"). InnerVolumeSpecName "kube-api-access-zrt2d". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.191516 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-config" (OuterVolumeSpecName: "config") pod "70037527-dc76-4c31-9841-6cc6b27fe032" (UID: "70037527-dc76-4c31-9841-6cc6b27fe032"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.205910 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70037527-dc76-4c31-9841-6cc6b27fe032" (UID: "70037527-dc76-4c31-9841-6cc6b27fe032"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.219317 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.219342 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrt2d\" (UniqueName: \"kubernetes.io/projected/70037527-dc76-4c31-9841-6cc6b27fe032-kube-api-access-zrt2d\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.219353 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.219363 4690 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-httpd-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.227673 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "70037527-dc76-4c31-9841-6cc6b27fe032" (UID: "70037527-dc76-4c31-9841-6cc6b27fe032"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.314508 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.320539 4690 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/70037527-dc76-4c31-9841-6cc6b27fe032-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.421890 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tvff2\" (UniqueName: \"kubernetes.io/projected/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-kube-api-access-tvff2\") pod \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\" (UID: \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.422361 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-operator-scripts\") pod \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\" (UID: \"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.423569 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b" (UID: "a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.425420 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-kube-api-access-tvff2" (OuterVolumeSpecName: "kube-api-access-tvff2") pod "a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b" (UID: "a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b"). 
InnerVolumeSpecName "kube-api-access-tvff2". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.524280 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.524307 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tvff2\" (UniqueName: \"kubernetes.io/projected/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b-kube-api-access-tvff2\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.542755 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.624898 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-scripts\") pod \"ae74738f-0b10-4955-97fb-e892ca7102a0\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.625044 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-tls-certs\") pod \"ae74738f-0b10-4955-97fb-e892ca7102a0\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.625125 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae74738f-0b10-4955-97fb-e892ca7102a0-logs\") pod \"ae74738f-0b10-4955-97fb-e892ca7102a0\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.625164 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-config-data\") pod \"ae74738f-0b10-4955-97fb-e892ca7102a0\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.625209 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4ltl6\" (UniqueName: \"kubernetes.io/projected/ae74738f-0b10-4955-97fb-e892ca7102a0-kube-api-access-4ltl6\") pod \"ae74738f-0b10-4955-97fb-e892ca7102a0\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.625331 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-combined-ca-bundle\") pod \"ae74738f-0b10-4955-97fb-e892ca7102a0\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.625433 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-secret-key\") pod \"ae74738f-0b10-4955-97fb-e892ca7102a0\" (UID: \"ae74738f-0b10-4955-97fb-e892ca7102a0\") " Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.625656 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae74738f-0b10-4955-97fb-e892ca7102a0-logs" (OuterVolumeSpecName: "logs") pod 
"ae74738f-0b10-4955-97fb-e892ca7102a0" (UID: "ae74738f-0b10-4955-97fb-e892ca7102a0"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.626341 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae74738f-0b10-4955-97fb-e892ca7102a0-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.630959 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae74738f-0b10-4955-97fb-e892ca7102a0-kube-api-access-4ltl6" (OuterVolumeSpecName: "kube-api-access-4ltl6") pod "ae74738f-0b10-4955-97fb-e892ca7102a0" (UID: "ae74738f-0b10-4955-97fb-e892ca7102a0"). InnerVolumeSpecName "kube-api-access-4ltl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.636319 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "ae74738f-0b10-4955-97fb-e892ca7102a0" (UID: "ae74738f-0b10-4955-97fb-e892ca7102a0"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.655700 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-scripts" (OuterVolumeSpecName: "scripts") pod "ae74738f-0b10-4955-97fb-e892ca7102a0" (UID: "ae74738f-0b10-4955-97fb-e892ca7102a0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.657135 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-config-data" (OuterVolumeSpecName: "config-data") pod "ae74738f-0b10-4955-97fb-e892ca7102a0" (UID: "ae74738f-0b10-4955-97fb-e892ca7102a0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.670786 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ae74738f-0b10-4955-97fb-e892ca7102a0" (UID: "ae74738f-0b10-4955-97fb-e892ca7102a0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.684600 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "ae74738f-0b10-4955-97fb-e892ca7102a0" (UID: "ae74738f-0b10-4955-97fb-e892ca7102a0"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.728338 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.728602 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4ltl6\" (UniqueName: \"kubernetes.io/projected/ae74738f-0b10-4955-97fb-e892ca7102a0-kube-api-access-4ltl6\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.728614 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.728624 4690 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.728632 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae74738f-0b10-4955-97fb-e892ca7102a0-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.728640 4690 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae74738f-0b10-4955-97fb-e892ca7102a0-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.773544 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-x7tz6" event={"ID":"a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b","Type":"ContainerDied","Data":"e7dd6e36045446a01dc5f2357a8a94f53a9f10e839bb639b9d2ef2afc7847ecd"} Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.773583 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e7dd6e36045446a01dc5f2357a8a94f53a9f10e839bb639b9d2ef2afc7847ecd" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.773667 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-x7tz6" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.785077 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-65b6bf5884-9kvqh" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.785165 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-65b6bf5884-9kvqh" event={"ID":"70037527-dc76-4c31-9841-6cc6b27fe032","Type":"ContainerDied","Data":"0cd787c465eb335457020ce6b22281ed4fdb63d2c9ee2d062ac8bd5a51c117b7"} Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.785231 4690 scope.go:117] "RemoveContainer" containerID="beccd301a63fd5b4871cde64deb7972cdfb711b02983ef69bd12f554c35d77ad" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.790555 4690 generic.go:334] "Generic (PLEG): container finished" podID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerID="b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88" exitCode=137 Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.791104 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-587c585984-xs7nl" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.791153 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-587c585984-xs7nl" event={"ID":"ae74738f-0b10-4955-97fb-e892ca7102a0","Type":"ContainerDied","Data":"b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88"} Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.791209 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-587c585984-xs7nl" event={"ID":"ae74738f-0b10-4955-97fb-e892ca7102a0","Type":"ContainerDied","Data":"6fd144d885bf415183b901f7a932e68b590fe359126cb026f695a082096a10a7"} Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.832271 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-65b6bf5884-9kvqh"] Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.847203 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-65b6bf5884-9kvqh"] Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.847404 4690 scope.go:117] "RemoveContainer" containerID="1ee497351d53e8a9f9a8191685b5badaba8117a7b558f7bb5ab2d0b931252a50" Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.858173 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-587c585984-xs7nl"] Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.875868 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-587c585984-xs7nl"] Mar 20 13:44:32 crc kubenswrapper[4690]: I0320 13:44:32.894838 4690 scope.go:117] "RemoveContainer" containerID="05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.121963 4690 scope.go:117] "RemoveContainer" containerID="b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.311932 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.312530 4690 scope.go:117] "RemoveContainer" containerID="05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34" Mar 20 13:44:33 crc kubenswrapper[4690]: E0320 13:44:33.312732 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34\": container with ID starting with 05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34 not found: ID does not exist" containerID="05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.312755 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34"} err="failed to get container status \"05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34\": rpc error: code = NotFound desc = could not find container \"05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34\": container with ID starting with 05923b4a8f18b4242000e9877e2ae0bb953a5977b0e7fc6260fa7b6162bdba34 not found: ID does not exist" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.312775 4690 scope.go:117] "RemoveContainer" containerID="b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88" Mar 20 13:44:33 crc kubenswrapper[4690]: E0320 13:44:33.312953 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88\": container with ID starting with b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88 not found: ID does not exist" containerID="b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.312977 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88"} err="failed to get container status \"b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88\": rpc error: code = NotFound desc = could not find container \"b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88\": container with ID starting with b0026d2dd94b8c97599116a03bba5e849e1bf28f3784790fe62bb5d8a779ce88 not found: ID does not exist" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.359865 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7338a418-a221-409b-bafd-666e7cc66a8e-operator-scripts\") pod \"7338a418-a221-409b-bafd-666e7cc66a8e\" (UID: \"7338a418-a221-409b-bafd-666e7cc66a8e\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.360006 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wbplv\" (UniqueName: \"kubernetes.io/projected/7338a418-a221-409b-bafd-666e7cc66a8e-kube-api-access-wbplv\") pod \"7338a418-a221-409b-bafd-666e7cc66a8e\" (UID: \"7338a418-a221-409b-bafd-666e7cc66a8e\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.360862 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7338a418-a221-409b-bafd-666e7cc66a8e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"7338a418-a221-409b-bafd-666e7cc66a8e" (UID: "7338a418-a221-409b-bafd-666e7cc66a8e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.365959 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7338a418-a221-409b-bafd-666e7cc66a8e-kube-api-access-wbplv" (OuterVolumeSpecName: "kube-api-access-wbplv") pod "7338a418-a221-409b-bafd-666e7cc66a8e" (UID: "7338a418-a221-409b-bafd-666e7cc66a8e"). InnerVolumeSpecName "kube-api-access-wbplv". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.406110 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.438194 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.439194 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.455295 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.466104 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-operator-scripts\") pod \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\" (UID: \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.466173 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-867zz\" (UniqueName: \"kubernetes.io/projected/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-kube-api-access-867zz\") pod \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\" (UID: \"d7435c23-ad0c-484c-bc24-5cceb7e01ccc\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.466718 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7338a418-a221-409b-bafd-666e7cc66a8e-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.466735 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wbplv\" (UniqueName: \"kubernetes.io/projected/7338a418-a221-409b-bafd-666e7cc66a8e-kube-api-access-wbplv\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.468022 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d7435c23-ad0c-484c-bc24-5cceb7e01ccc" (UID: "d7435c23-ad0c-484c-bc24-5cceb7e01ccc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.472909 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-kube-api-access-867zz" (OuterVolumeSpecName: "kube-api-access-867zz") pod "d7435c23-ad0c-484c-bc24-5cceb7e01ccc" (UID: "d7435c23-ad0c-484c-bc24-5cceb7e01ccc"). 
InnerVolumeSpecName "kube-api-access-867zz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.567829 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b58df85c-1bf7-41ba-9839-d74172783a24-operator-scripts\") pod \"b58df85c-1bf7-41ba-9839-d74172783a24\" (UID: \"b58df85c-1bf7-41ba-9839-d74172783a24\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.567915 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/817d49d2-79e2-42f0-b503-bd6bf78f1459-operator-scripts\") pod \"817d49d2-79e2-42f0-b503-bd6bf78f1459\" (UID: \"817d49d2-79e2-42f0-b503-bd6bf78f1459\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.568290 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ffaa372-aeed-471d-b5ba-f7692e1daad8-operator-scripts\") pod \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\" (UID: \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.568358 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/817d49d2-79e2-42f0-b503-bd6bf78f1459-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "817d49d2-79e2-42f0-b503-bd6bf78f1459" (UID: "817d49d2-79e2-42f0-b503-bd6bf78f1459"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.568495 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b58df85c-1bf7-41ba-9839-d74172783a24-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "b58df85c-1bf7-41ba-9839-d74172783a24" (UID: "b58df85c-1bf7-41ba-9839-d74172783a24"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.568694 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wr6w\" (UniqueName: \"kubernetes.io/projected/1ffaa372-aeed-471d-b5ba-f7692e1daad8-kube-api-access-4wr6w\") pod \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\" (UID: \"1ffaa372-aeed-471d-b5ba-f7692e1daad8\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.568835 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9m7r\" (UniqueName: \"kubernetes.io/projected/817d49d2-79e2-42f0-b503-bd6bf78f1459-kube-api-access-l9m7r\") pod \"817d49d2-79e2-42f0-b503-bd6bf78f1459\" (UID: \"817d49d2-79e2-42f0-b503-bd6bf78f1459\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.568866 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1ffaa372-aeed-471d-b5ba-f7692e1daad8-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1ffaa372-aeed-471d-b5ba-f7692e1daad8" (UID: "1ffaa372-aeed-471d-b5ba-f7692e1daad8"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.568885 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x5t2w\" (UniqueName: \"kubernetes.io/projected/b58df85c-1bf7-41ba-9839-d74172783a24-kube-api-access-x5t2w\") pod \"b58df85c-1bf7-41ba-9839-d74172783a24\" (UID: \"b58df85c-1bf7-41ba-9839-d74172783a24\") " Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.569864 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-867zz\" (UniqueName: \"kubernetes.io/projected/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-kube-api-access-867zz\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.569881 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b58df85c-1bf7-41ba-9839-d74172783a24-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.569889 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/817d49d2-79e2-42f0-b503-bd6bf78f1459-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.569896 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1ffaa372-aeed-471d-b5ba-f7692e1daad8-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.569905 4690 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d7435c23-ad0c-484c-bc24-5cceb7e01ccc-operator-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.571948 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ffaa372-aeed-471d-b5ba-f7692e1daad8-kube-api-access-4wr6w" (OuterVolumeSpecName: "kube-api-access-4wr6w") pod "1ffaa372-aeed-471d-b5ba-f7692e1daad8" (UID: "1ffaa372-aeed-471d-b5ba-f7692e1daad8"). InnerVolumeSpecName "kube-api-access-4wr6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.572390 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b58df85c-1bf7-41ba-9839-d74172783a24-kube-api-access-x5t2w" (OuterVolumeSpecName: "kube-api-access-x5t2w") pod "b58df85c-1bf7-41ba-9839-d74172783a24" (UID: "b58df85c-1bf7-41ba-9839-d74172783a24"). InnerVolumeSpecName "kube-api-access-x5t2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.574035 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/817d49d2-79e2-42f0-b503-bd6bf78f1459-kube-api-access-l9m7r" (OuterVolumeSpecName: "kube-api-access-l9m7r") pod "817d49d2-79e2-42f0-b503-bd6bf78f1459" (UID: "817d49d2-79e2-42f0-b503-bd6bf78f1459"). InnerVolumeSpecName "kube-api-access-l9m7r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.671813 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wr6w\" (UniqueName: \"kubernetes.io/projected/1ffaa372-aeed-471d-b5ba-f7692e1daad8-kube-api-access-4wr6w\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.672150 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9m7r\" (UniqueName: \"kubernetes.io/projected/817d49d2-79e2-42f0-b503-bd6bf78f1459-kube-api-access-l9m7r\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.672239 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x5t2w\" (UniqueName: \"kubernetes.io/projected/b58df85c-1bf7-41ba-9839-d74172783a24-kube-api-access-x5t2w\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.798718 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-tqb96" event={"ID":"1ffaa372-aeed-471d-b5ba-f7692e1daad8","Type":"ContainerDied","Data":"6538b6183bb6f2e825894eb9e949d64644c6ff690000d17c1828ab71fa6596a1"} Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.798758 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6538b6183bb6f2e825894eb9e949d64644c6ff690000d17c1828ab71fa6596a1" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.798734 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-tqb96" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.801077 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" event={"ID":"b58df85c-1bf7-41ba-9839-d74172783a24","Type":"ContainerDied","Data":"fb9b7ca3008e651ae8b2c3e7fb4b0b6c9eefed914e7ce893358bfef90e6116d5"} Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.801199 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb9b7ca3008e651ae8b2c3e7fb4b0b6c9eefed914e7ce893358bfef90e6116d5" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.801997 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-1ed3-account-create-update-xqkdf" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.802531 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-619b-account-create-update-vld5x" event={"ID":"d7435c23-ad0c-484c-bc24-5cceb7e01ccc","Type":"ContainerDied","Data":"7f56fc60c2c118fb736e4a5488750895ea49880b7cf0d31785dfb6032ab4aee3"} Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.802553 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f56fc60c2c118fb736e4a5488750895ea49880b7cf0d31785dfb6032ab4aee3" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.802587 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-619b-account-create-update-vld5x" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.812953 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerStarted","Data":"5ba91f69620e8669f9b646f6661c3ad8042bc475249908c3b10695365a47891b"} Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.813247 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="ceilometer-central-agent" containerID="cri-o://e148622b819b73225d7d154a589a27e4b76a6302f87c12d1e92a4f6019a8dcd2" gracePeriod=30 Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.813519 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.813517 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="sg-core" containerID="cri-o://a99091e52d2a4ec8c112e902e034866fac1e53ef7cb36314e65c54a3b60b5b98" gracePeriod=30 Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.813559 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="ceilometer-notification-agent" containerID="cri-o://982ed374248ba889066c5e194078a786b945ba0132066b6c7fab8d3e33cbf90a" gracePeriod=30 Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.813542 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="proxy-httpd" containerID="cri-o://5ba91f69620e8669f9b646f6661c3ad8042bc475249908c3b10695365a47891b" gracePeriod=30 Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.819699 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" event={"ID":"817d49d2-79e2-42f0-b503-bd6bf78f1459","Type":"ContainerDied","Data":"d4f14a2b39e9e33acac06d50c6460ecebb0025354fb5ea99f68f1b0435ee5028"} Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.819730 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d4f14a2b39e9e33acac06d50c6460ecebb0025354fb5ea99f68f1b0435ee5028" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.819786 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-9b74-account-create-update-qrqkx" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.832868 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.833135 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.833832 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-p7w2k" event={"ID":"7338a418-a221-409b-bafd-666e7cc66a8e","Type":"ContainerDied","Data":"fcd678b003ed1f9e45805db9c3192c7afb8fee673903937ac922052d893f2e95"} Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.833990 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fcd678b003ed1f9e45805db9c3192c7afb8fee673903937ac922052d893f2e95" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.834108 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-p7w2k" Mar 20 13:44:33 crc kubenswrapper[4690]: I0320 13:44:33.841137 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.565365378 podStartE2EDuration="6.841117964s" podCreationTimestamp="2026-03-20 13:44:27 +0000 UTC" firstStartedPulling="2026-03-20 13:44:28.918663592 +0000 UTC m=+1315.208263535" lastFinishedPulling="2026-03-20 13:44:33.194416178 +0000 UTC m=+1319.484016121" observedRunningTime="2026-03-20 13:44:33.832513089 +0000 UTC m=+1320.122113032" watchObservedRunningTime="2026-03-20 13:44:33.841117964 +0000 UTC m=+1320.130717917" Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.386959 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.432866 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" path="/var/lib/kubelet/pods/70037527-dc76-4c31-9841-6cc6b27fe032/volumes" Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.434066 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" path="/var/lib/kubelet/pods/ae74738f-0b10-4955-97fb-e892ca7102a0/volumes" Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.462330 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-b994f67f8-wh5fd" Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.580327 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-94bccb6f6-kk87d"] Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.581148 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-94bccb6f6-kk87d" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerName="placement-log" containerID="cri-o://7bbc8a49430ac1bcb64f9e5094fe0d4e5e4f42ca1d09cadc16710f57055b6798" gracePeriod=30 Mar 20 13:44:34 crc 
kubenswrapper[4690]: I0320 13:44:34.587241 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/placement-94bccb6f6-kk87d" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerName="placement-api" containerID="cri-o://d75dc827af97983c938e5fdd672557ac4733979363f2210d2b851349544f20bf" gracePeriod=30 Mar 20 13:44:34 crc kubenswrapper[4690]: E0320 13:44:34.808776 4690 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4bce072_cb50_4167_92cc_eab9c8501d2d.slice/crio-7bbc8a49430ac1bcb64f9e5094fe0d4e5e4f42ca1d09cadc16710f57055b6798.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4bce072_cb50_4167_92cc_eab9c8501d2d.slice/crio-conmon-7bbc8a49430ac1bcb64f9e5094fe0d4e5e4f42ca1d09cadc16710f57055b6798.scope\": RecentStats: unable to find data in memory cache]" Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.864820 4690 generic.go:334] "Generic (PLEG): container finished" podID="136193db-e17b-463c-b364-6dbd1d765d48" containerID="5ba91f69620e8669f9b646f6661c3ad8042bc475249908c3b10695365a47891b" exitCode=0 Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.864891 4690 generic.go:334] "Generic (PLEG): container finished" podID="136193db-e17b-463c-b364-6dbd1d765d48" containerID="a99091e52d2a4ec8c112e902e034866fac1e53ef7cb36314e65c54a3b60b5b98" exitCode=2 Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.864905 4690 generic.go:334] "Generic (PLEG): container finished" podID="136193db-e17b-463c-b364-6dbd1d765d48" containerID="982ed374248ba889066c5e194078a786b945ba0132066b6c7fab8d3e33cbf90a" exitCode=0 Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.864895 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerDied","Data":"5ba91f69620e8669f9b646f6661c3ad8042bc475249908c3b10695365a47891b"} Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.864940 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerDied","Data":"a99091e52d2a4ec8c112e902e034866fac1e53ef7cb36314e65c54a3b60b5b98"} Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.864952 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerDied","Data":"982ed374248ba889066c5e194078a786b945ba0132066b6c7fab8d3e33cbf90a"} Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.867480 4690 generic.go:334] "Generic (PLEG): container finished" podID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerID="7bbc8a49430ac1bcb64f9e5094fe0d4e5e4f42ca1d09cadc16710f57055b6798" exitCode=143 Mar 20 13:44:34 crc kubenswrapper[4690]: I0320 13:44:34.867540 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94bccb6f6-kk87d" event={"ID":"d4bce072-cb50-4167-92cc-eab9c8501d2d","Type":"ContainerDied","Data":"7bbc8a49430ac1bcb64f9e5094fe0d4e5e4f42ca1d09cadc16710f57055b6798"} Mar 20 13:44:37 crc kubenswrapper[4690]: I0320 13:44:37.916776 4690 generic.go:334] "Generic (PLEG): container finished" podID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerID="d75dc827af97983c938e5fdd672557ac4733979363f2210d2b851349544f20bf" exitCode=0 Mar 20 13:44:37 crc kubenswrapper[4690]: I0320 13:44:37.917000 4690 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94bccb6f6-kk87d" event={"ID":"d4bce072-cb50-4167-92cc-eab9c8501d2d","Type":"ContainerDied","Data":"d75dc827af97983c938e5fdd672557ac4733979363f2210d2b851349544f20bf"} Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.125862 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.159175 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vjbz\" (UniqueName: \"kubernetes.io/projected/d4bce072-cb50-4167-92cc-eab9c8501d2d-kube-api-access-4vjbz\") pod \"d4bce072-cb50-4167-92cc-eab9c8501d2d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.159239 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-internal-tls-certs\") pod \"d4bce072-cb50-4167-92cc-eab9c8501d2d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.159308 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bce072-cb50-4167-92cc-eab9c8501d2d-logs\") pod \"d4bce072-cb50-4167-92cc-eab9c8501d2d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.159327 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-public-tls-certs\") pod \"d4bce072-cb50-4167-92cc-eab9c8501d2d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.159349 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-combined-ca-bundle\") pod \"d4bce072-cb50-4167-92cc-eab9c8501d2d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.159499 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-config-data\") pod \"d4bce072-cb50-4167-92cc-eab9c8501d2d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.159538 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-scripts\") pod \"d4bce072-cb50-4167-92cc-eab9c8501d2d\" (UID: \"d4bce072-cb50-4167-92cc-eab9c8501d2d\") " Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.163415 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4bce072-cb50-4167-92cc-eab9c8501d2d-logs" (OuterVolumeSpecName: "logs") pod "d4bce072-cb50-4167-92cc-eab9c8501d2d" (UID: "d4bce072-cb50-4167-92cc-eab9c8501d2d"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.168229 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4bce072-cb50-4167-92cc-eab9c8501d2d-kube-api-access-4vjbz" (OuterVolumeSpecName: "kube-api-access-4vjbz") pod "d4bce072-cb50-4167-92cc-eab9c8501d2d" (UID: "d4bce072-cb50-4167-92cc-eab9c8501d2d"). InnerVolumeSpecName "kube-api-access-4vjbz". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.171998 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-scripts" (OuterVolumeSpecName: "scripts") pod "d4bce072-cb50-4167-92cc-eab9c8501d2d" (UID: "d4bce072-cb50-4167-92cc-eab9c8501d2d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.221230 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-config-data" (OuterVolumeSpecName: "config-data") pod "d4bce072-cb50-4167-92cc-eab9c8501d2d" (UID: "d4bce072-cb50-4167-92cc-eab9c8501d2d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.231955 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4bce072-cb50-4167-92cc-eab9c8501d2d" (UID: "d4bce072-cb50-4167-92cc-eab9c8501d2d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.262656 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.262702 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.262716 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vjbz\" (UniqueName: \"kubernetes.io/projected/d4bce072-cb50-4167-92cc-eab9c8501d2d-kube-api-access-4vjbz\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.262732 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4bce072-cb50-4167-92cc-eab9c8501d2d-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.262749 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.274757 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "d4bce072-cb50-4167-92cc-eab9c8501d2d" (UID: "d4bce072-cb50-4167-92cc-eab9c8501d2d"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.297321 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "d4bce072-cb50-4167-92cc-eab9c8501d2d" (UID: "d4bce072-cb50-4167-92cc-eab9c8501d2d"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.364612 4690 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.364641 4690 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4bce072-cb50-4167-92cc-eab9c8501d2d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.927525 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-94bccb6f6-kk87d" event={"ID":"d4bce072-cb50-4167-92cc-eab9c8501d2d","Type":"ContainerDied","Data":"a93071143859a1d7a3264d23abb7d7008733fb3798f964f10419cac7bbd6f05d"} Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.927588 4690 scope.go:117] "RemoveContainer" containerID="d75dc827af97983c938e5fdd672557ac4733979363f2210d2b851349544f20bf" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.927753 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-94bccb6f6-kk87d" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.960539 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-94bccb6f6-kk87d"] Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.966299 4690 scope.go:117] "RemoveContainer" containerID="7bbc8a49430ac1bcb64f9e5094fe0d4e5e4f42ca1d09cadc16710f57055b6798" Mar 20 13:44:38 crc kubenswrapper[4690]: I0320 13:44:38.969260 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-94bccb6f6-kk87d"] Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.702717 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fvfgz"] Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703355 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon-log" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703372 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon-log" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703382 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerName="placement-api" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703389 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerName="placement-api" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703398 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerName="placement-log" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703405 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" 
containerName="placement-log" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703416 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b58df85c-1bf7-41ba-9839-d74172783a24" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703421 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="b58df85c-1bf7-41ba-9839-d74172783a24" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703434 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" containerName="neutron-httpd" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703440 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" containerName="neutron-httpd" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703465 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703472 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703481 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7338a418-a221-409b-bafd-666e7cc66a8e" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703487 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7338a418-a221-409b-bafd-666e7cc66a8e" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703505 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d7435c23-ad0c-484c-bc24-5cceb7e01ccc" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703512 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d7435c23-ad0c-484c-bc24-5cceb7e01ccc" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703521 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" containerName="neutron-api" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703527 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" containerName="neutron-api" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703536 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="817d49d2-79e2-42f0-b503-bd6bf78f1459" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703542 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="817d49d2-79e2-42f0-b503-bd6bf78f1459" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703549 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703556 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: E0320 13:44:39.703568 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ffaa372-aeed-471d-b5ba-f7692e1daad8" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 
13:44:39.703573 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ffaa372-aeed-471d-b5ba-f7692e1daad8" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703723 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="7338a418-a221-409b-bafd-666e7cc66a8e" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703732 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" containerName="neutron-api" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703742 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703749 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d7435c23-ad0c-484c-bc24-5cceb7e01ccc" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703757 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="817d49d2-79e2-42f0-b503-bd6bf78f1459" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703769 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703779 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="b58df85c-1bf7-41ba-9839-d74172783a24" containerName="mariadb-account-create-update" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703790 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerName="placement-log" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703800 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="70037527-dc76-4c31-9841-6cc6b27fe032" containerName="neutron-httpd" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703807 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ffaa372-aeed-471d-b5ba-f7692e1daad8" containerName="mariadb-database-create" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703816 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae74738f-0b10-4955-97fb-e892ca7102a0" containerName="horizon-log" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.703825 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" containerName="placement-api" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.704369 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.705659 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-m8lsr" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.708834 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.708998 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.725261 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fvfgz"] Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.796399 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-scripts\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.796503 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.796555 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-config-data\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.796617 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-66g97\" (UniqueName: \"kubernetes.io/projected/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-kube-api-access-66g97\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.898212 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-scripts\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.898317 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.898362 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-config-data\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: 
\"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.898394 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-66g97\" (UniqueName: \"kubernetes.io/projected/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-kube-api-access-66g97\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.904086 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-scripts\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.907536 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.912235 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-config-data\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.916761 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-66g97\" (UniqueName: \"kubernetes.io/projected/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-kube-api-access-66g97\") pod \"nova-cell0-conductor-db-sync-fvfgz\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.941391 4690 generic.go:334] "Generic (PLEG): container finished" podID="136193db-e17b-463c-b364-6dbd1d765d48" containerID="e148622b819b73225d7d154a589a27e4b76a6302f87c12d1e92a4f6019a8dcd2" exitCode=0 Mar 20 13:44:39 crc kubenswrapper[4690]: I0320 13:44:39.941509 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerDied","Data":"e148622b819b73225d7d154a589a27e4b76a6302f87c12d1e92a4f6019a8dcd2"} Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.026226 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.317517 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.424872 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4bce072-cb50-4167-92cc-eab9c8501d2d" path="/var/lib/kubelet/pods/d4bce072-cb50-4167-92cc-eab9c8501d2d/volumes" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.508354 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-log-httpd\") pod \"136193db-e17b-463c-b364-6dbd1d765d48\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.508464 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x29bn\" (UniqueName: \"kubernetes.io/projected/136193db-e17b-463c-b364-6dbd1d765d48-kube-api-access-x29bn\") pod \"136193db-e17b-463c-b364-6dbd1d765d48\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.508517 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-run-httpd\") pod \"136193db-e17b-463c-b364-6dbd1d765d48\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.508602 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-config-data\") pod \"136193db-e17b-463c-b364-6dbd1d765d48\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.508739 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-combined-ca-bundle\") pod \"136193db-e17b-463c-b364-6dbd1d765d48\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.508762 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-sg-core-conf-yaml\") pod \"136193db-e17b-463c-b364-6dbd1d765d48\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.508817 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-scripts\") pod \"136193db-e17b-463c-b364-6dbd1d765d48\" (UID: \"136193db-e17b-463c-b364-6dbd1d765d48\") " Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.509003 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "136193db-e17b-463c-b364-6dbd1d765d48" (UID: "136193db-e17b-463c-b364-6dbd1d765d48"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.509041 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "136193db-e17b-463c-b364-6dbd1d765d48" (UID: "136193db-e17b-463c-b364-6dbd1d765d48"). 
InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.509753 4690 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.509778 4690 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/136193db-e17b-463c-b364-6dbd1d765d48-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.514188 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/136193db-e17b-463c-b364-6dbd1d765d48-kube-api-access-x29bn" (OuterVolumeSpecName: "kube-api-access-x29bn") pod "136193db-e17b-463c-b364-6dbd1d765d48" (UID: "136193db-e17b-463c-b364-6dbd1d765d48"). InnerVolumeSpecName "kube-api-access-x29bn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.516135 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-scripts" (OuterVolumeSpecName: "scripts") pod "136193db-e17b-463c-b364-6dbd1d765d48" (UID: "136193db-e17b-463c-b364-6dbd1d765d48"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:40 crc kubenswrapper[4690]: W0320 13:44:40.529079 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ecaabd8_5cb5_4e0f_b5c8_c73075e68880.slice/crio-6e0ceb6ffeb9f26c3293f75a2f69980fb642e229a8c4abe4daa8505765e99fd5 WatchSource:0}: Error finding container 6e0ceb6ffeb9f26c3293f75a2f69980fb642e229a8c4abe4daa8505765e99fd5: Status 404 returned error can't find the container with id 6e0ceb6ffeb9f26c3293f75a2f69980fb642e229a8c4abe4daa8505765e99fd5 Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.535918 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fvfgz"] Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.544328 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "136193db-e17b-463c-b364-6dbd1d765d48" (UID: "136193db-e17b-463c-b364-6dbd1d765d48"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.609679 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "136193db-e17b-463c-b364-6dbd1d765d48" (UID: "136193db-e17b-463c-b364-6dbd1d765d48"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.612294 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.612322 4690 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.612336 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.612349 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x29bn\" (UniqueName: \"kubernetes.io/projected/136193db-e17b-463c-b364-6dbd1d765d48-kube-api-access-x29bn\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.615745 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-config-data" (OuterVolumeSpecName: "config-data") pod "136193db-e17b-463c-b364-6dbd1d765d48" (UID: "136193db-e17b-463c-b364-6dbd1d765d48"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.713705 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/136193db-e17b-463c-b364-6dbd1d765d48-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.956975 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"136193db-e17b-463c-b364-6dbd1d765d48","Type":"ContainerDied","Data":"19480b51f53d89c176bcef3857b014a90d92d08a4ed08ddc6e8c989e1537f715"} Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.957042 4690 scope.go:117] "RemoveContainer" containerID="5ba91f69620e8669f9b646f6661c3ad8042bc475249908c3b10695365a47891b" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.957052 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.958340 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" event={"ID":"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880","Type":"ContainerStarted","Data":"6e0ceb6ffeb9f26c3293f75a2f69980fb642e229a8c4abe4daa8505765e99fd5"} Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.974221 4690 scope.go:117] "RemoveContainer" containerID="a99091e52d2a4ec8c112e902e034866fac1e53ef7cb36314e65c54a3b60b5b98" Mar 20 13:44:40 crc kubenswrapper[4690]: I0320 13:44:40.998897 4690 scope.go:117] "RemoveContainer" containerID="982ed374248ba889066c5e194078a786b945ba0132066b6c7fab8d3e33cbf90a" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.009437 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.018760 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026100 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:41 crc kubenswrapper[4690]: E0320 13:44:41.026444 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="ceilometer-notification-agent" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026461 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="ceilometer-notification-agent" Mar 20 13:44:41 crc kubenswrapper[4690]: E0320 13:44:41.026474 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="proxy-httpd" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026480 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="proxy-httpd" Mar 20 13:44:41 crc kubenswrapper[4690]: E0320 13:44:41.026500 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="ceilometer-central-agent" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026507 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="ceilometer-central-agent" Mar 20 13:44:41 crc kubenswrapper[4690]: E0320 13:44:41.026533 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="sg-core" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026538 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="sg-core" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026682 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="ceilometer-central-agent" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026703 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="proxy-httpd" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026713 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="sg-core" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.026722 4690 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="136193db-e17b-463c-b364-6dbd1d765d48" containerName="ceilometer-notification-agent" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.028154 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.028756 4690 scope.go:117] "RemoveContainer" containerID="e148622b819b73225d7d154a589a27e4b76a6302f87c12d1e92a4f6019a8dcd2" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.033930 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.034345 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.066597 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.125481 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-scripts\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.125563 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.125603 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-run-httpd\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.125643 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-config-data\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.125714 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zbkh\" (UniqueName: \"kubernetes.io/projected/f72c59e7-4cfe-4ce2-8b13-a27673316486-kube-api-access-9zbkh\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.126070 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.126187 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-log-httpd\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc 
kubenswrapper[4690]: I0320 13:44:41.227926 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zbkh\" (UniqueName: \"kubernetes.io/projected/f72c59e7-4cfe-4ce2-8b13-a27673316486-kube-api-access-9zbkh\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.228013 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.228039 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-log-httpd\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.228078 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-scripts\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.228098 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.228121 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-run-httpd\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.228149 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-config-data\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.228612 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-log-httpd\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.228933 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-run-httpd\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.233657 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-scripts\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.234254 4690 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-config-data\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.234596 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.239740 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.245488 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zbkh\" (UniqueName: \"kubernetes.io/projected/f72c59e7-4cfe-4ce2-8b13-a27673316486-kube-api-access-9zbkh\") pod \"ceilometer-0\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.349099 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.828504 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:41 crc kubenswrapper[4690]: I0320 13:44:41.967636 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerStarted","Data":"1b959690e915233356d1c340b32a2d2c5405997a24baba5c6b92fc621d1d42af"} Mar 20 13:44:42 crc kubenswrapper[4690]: I0320 13:44:42.429628 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="136193db-e17b-463c-b364-6dbd1d765d48" path="/var/lib/kubelet/pods/136193db-e17b-463c-b364-6dbd1d765d48/volumes" Mar 20 13:44:42 crc kubenswrapper[4690]: I0320 13:44:42.978944 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerStarted","Data":"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834"} Mar 20 13:44:47 crc kubenswrapper[4690]: I0320 13:44:47.073377 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:44:47 crc kubenswrapper[4690]: I0320 13:44:47.077307 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerName="glance-log" containerID="cri-o://7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c" gracePeriod=30 Mar 20 13:44:47 crc kubenswrapper[4690]: I0320 13:44:47.077470 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerName="glance-httpd" containerID="cri-o://592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5" gracePeriod=30 Mar 20 13:44:48 crc kubenswrapper[4690]: I0320 13:44:48.031173 4690 generic.go:334] "Generic (PLEG): container finished" 
podID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerID="7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c" exitCode=143 Mar 20 13:44:48 crc kubenswrapper[4690]: I0320 13:44:48.031216 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"04b6b07b-9136-4036-a8a1-f048b6b41b44","Type":"ContainerDied","Data":"7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c"} Mar 20 13:44:48 crc kubenswrapper[4690]: I0320 13:44:48.177433 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:49 crc kubenswrapper[4690]: I0320 13:44:49.049199 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerStarted","Data":"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c"} Mar 20 13:44:49 crc kubenswrapper[4690]: I0320 13:44:49.051053 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" event={"ID":"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880","Type":"ContainerStarted","Data":"1d90b4a3dcc4853a2310f0bd033a3c18fd91aeb397d8ac19266e713093dc867b"} Mar 20 13:44:49 crc kubenswrapper[4690]: I0320 13:44:49.082670 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" podStartSLOduration=1.863640525 podStartE2EDuration="10.082642532s" podCreationTimestamp="2026-03-20 13:44:39 +0000 UTC" firstStartedPulling="2026-03-20 13:44:40.531618325 +0000 UTC m=+1326.821218268" lastFinishedPulling="2026-03-20 13:44:48.750620332 +0000 UTC m=+1335.040220275" observedRunningTime="2026-03-20 13:44:49.075654813 +0000 UTC m=+1335.365254756" watchObservedRunningTime="2026-03-20 13:44:49.082642532 +0000 UTC m=+1335.372242485" Mar 20 13:44:49 crc kubenswrapper[4690]: I0320 13:44:49.100991 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:44:49 crc kubenswrapper[4690]: I0320 13:44:49.101477 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerName="glance-log" containerID="cri-o://c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b" gracePeriod=30 Mar 20 13:44:49 crc kubenswrapper[4690]: I0320 13:44:49.101531 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerName="glance-httpd" containerID="cri-o://ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e" gracePeriod=30 Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.061571 4690 generic.go:334] "Generic (PLEG): container finished" podID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerID="c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b" exitCode=143 Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.061634 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c","Type":"ContainerDied","Data":"c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b"} Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.065704 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerStarted","Data":"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba"} Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.775125 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.906890 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-config-data\") pod \"04b6b07b-9136-4036-a8a1-f048b6b41b44\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.906959 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-public-tls-certs\") pod \"04b6b07b-9136-4036-a8a1-f048b6b41b44\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.907049 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-httpd-run\") pod \"04b6b07b-9136-4036-a8a1-f048b6b41b44\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.907107 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-scripts\") pod \"04b6b07b-9136-4036-a8a1-f048b6b41b44\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.907123 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"04b6b07b-9136-4036-a8a1-f048b6b41b44\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.907165 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-combined-ca-bundle\") pod \"04b6b07b-9136-4036-a8a1-f048b6b41b44\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.907238 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-logs\") pod \"04b6b07b-9136-4036-a8a1-f048b6b41b44\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.907259 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z57rj\" (UniqueName: \"kubernetes.io/projected/04b6b07b-9136-4036-a8a1-f048b6b41b44-kube-api-access-z57rj\") pod \"04b6b07b-9136-4036-a8a1-f048b6b41b44\" (UID: \"04b6b07b-9136-4036-a8a1-f048b6b41b44\") " Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.907641 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "04b6b07b-9136-4036-a8a1-f048b6b41b44" (UID: "04b6b07b-9136-4036-a8a1-f048b6b41b44"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.907938 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-logs" (OuterVolumeSpecName: "logs") pod "04b6b07b-9136-4036-a8a1-f048b6b41b44" (UID: "04b6b07b-9136-4036-a8a1-f048b6b41b44"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.912600 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-scripts" (OuterVolumeSpecName: "scripts") pod "04b6b07b-9136-4036-a8a1-f048b6b41b44" (UID: "04b6b07b-9136-4036-a8a1-f048b6b41b44"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.913049 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04b6b07b-9136-4036-a8a1-f048b6b41b44-kube-api-access-z57rj" (OuterVolumeSpecName: "kube-api-access-z57rj") pod "04b6b07b-9136-4036-a8a1-f048b6b41b44" (UID: "04b6b07b-9136-4036-a8a1-f048b6b41b44"). InnerVolumeSpecName "kube-api-access-z57rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.915913 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "glance") pod "04b6b07b-9136-4036-a8a1-f048b6b41b44" (UID: "04b6b07b-9136-4036-a8a1-f048b6b41b44"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.947658 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "04b6b07b-9136-4036-a8a1-f048b6b41b44" (UID: "04b6b07b-9136-4036-a8a1-f048b6b41b44"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.976035 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-config-data" (OuterVolumeSpecName: "config-data") pod "04b6b07b-9136-4036-a8a1-f048b6b41b44" (UID: "04b6b07b-9136-4036-a8a1-f048b6b41b44"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:50 crc kubenswrapper[4690]: I0320 13:44:50.978247 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "04b6b07b-9136-4036-a8a1-f048b6b41b44" (UID: "04b6b07b-9136-4036-a8a1-f048b6b41b44"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.009603 4690 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-httpd-run\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.009644 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.009686 4690 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.009700 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.009715 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/04b6b07b-9136-4036-a8a1-f048b6b41b44-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.009727 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z57rj\" (UniqueName: \"kubernetes.io/projected/04b6b07b-9136-4036-a8a1-f048b6b41b44-kube-api-access-z57rj\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.009738 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.009750 4690 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/04b6b07b-9136-4036-a8a1-f048b6b41b44-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.033896 4690 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.075057 4690 generic.go:334] "Generic (PLEG): container finished" podID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerID="592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5" exitCode=0 Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.075096 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"04b6b07b-9136-4036-a8a1-f048b6b41b44","Type":"ContainerDied","Data":"592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5"} Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.075125 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"04b6b07b-9136-4036-a8a1-f048b6b41b44","Type":"ContainerDied","Data":"645065f0467b2d067ac9478c8979fd92f7c0745ef05a89212c583dc8945c10e6"} Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.075128 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.075144 4690 scope.go:117] "RemoveContainer" containerID="592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.114815 4690 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.115501 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.116167 4690 scope.go:117] "RemoveContainer" containerID="7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.124082 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.145239 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:44:51 crc kubenswrapper[4690]: E0320 13:44:51.145603 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerName="glance-log" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.145622 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerName="glance-log" Mar 20 13:44:51 crc kubenswrapper[4690]: E0320 13:44:51.145647 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerName="glance-httpd" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.145653 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerName="glance-httpd" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.145820 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerName="glance-httpd" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.145856 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" containerName="glance-log" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.146726 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.149999 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.150778 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.154514 4690 scope.go:117] "RemoveContainer" containerID="592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5" Mar 20 13:44:51 crc kubenswrapper[4690]: E0320 13:44:51.154980 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5\": container with ID starting with 592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5 not found: ID does not exist" containerID="592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.155015 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5"} err="failed to get container status \"592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5\": rpc error: code = NotFound desc = could not find container \"592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5\": container with ID starting with 592d5addbf2541c5bf9fd4e0e1d229409e36c20652c700ee25f5bc7e350798c5 not found: ID does not exist" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.155041 4690 scope.go:117] "RemoveContainer" containerID="7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c" Mar 20 13:44:51 crc kubenswrapper[4690]: E0320 13:44:51.155546 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c\": container with ID starting with 7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c not found: ID does not exist" containerID="7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.155577 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c"} err="failed to get container status \"7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c\": rpc error: code = NotFound desc = could not find container \"7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c\": container with ID starting with 7009a8110a22ab6979de7c2dc438de5d7f05fe44c552f005767b13ab3f399e5c not found: ID does not exist" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.157575 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.217215 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-logs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.217286 4690 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.217332 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.217361 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-scripts\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.217432 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.217456 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.217499 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-config-data\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.217535 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gvxs\" (UniqueName: \"kubernetes.io/projected/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-kube-api-access-5gvxs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.319039 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.319150 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc 
kubenswrapper[4690]: I0320 13:44:51.319199 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-scripts\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.319338 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.319379 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.319505 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.320071 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.319482 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-config-data\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.320218 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gvxs\" (UniqueName: \"kubernetes.io/projected/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-kube-api-access-5gvxs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.320756 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-logs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.321336 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-logs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.323732 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.324653 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.325400 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-scripts\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.325709 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-config-data\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.340077 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gvxs\" (UniqueName: \"kubernetes.io/projected/8a099cdf-48ab-4e3a-9d46-88d38d63bdc4-kube-api-access-5gvxs\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.351924 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"glance-default-external-api-0\" (UID: \"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4\") " pod="openstack/glance-default-external-api-0" Mar 20 13:44:51 crc kubenswrapper[4690]: I0320 13:44:51.463009 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.021062 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Mar 20 13:44:52 crc kubenswrapper[4690]: W0320 13:44:52.021814 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8a099cdf_48ab_4e3a_9d46_88d38d63bdc4.slice/crio-2506f3942786c314d33926edaff8009891bc8ddc82de5bf113f8f75fda090f70 WatchSource:0}: Error finding container 2506f3942786c314d33926edaff8009891bc8ddc82de5bf113f8f75fda090f70: Status 404 returned error can't find the container with id 2506f3942786c314d33926edaff8009891bc8ddc82de5bf113f8f75fda090f70 Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.092781 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4","Type":"ContainerStarted","Data":"2506f3942786c314d33926edaff8009891bc8ddc82de5bf113f8f75fda090f70"} Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.109694 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerStarted","Data":"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e"} Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.110144 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.110207 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="proxy-httpd" containerID="cri-o://cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e" gracePeriod=30 Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.110438 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="sg-core" containerID="cri-o://ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba" gracePeriod=30 Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.110535 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="ceilometer-notification-agent" containerID="cri-o://54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c" gracePeriod=30 Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.111964 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="ceilometer-central-agent" containerID="cri-o://745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834" gracePeriod=30 Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.140926 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.607932915 podStartE2EDuration="12.140878181s" podCreationTimestamp="2026-03-20 13:44:40 +0000 UTC" firstStartedPulling="2026-03-20 13:44:41.842424002 +0000 UTC m=+1328.132023945" lastFinishedPulling="2026-03-20 13:44:51.375369268 +0000 UTC m=+1337.664969211" observedRunningTime="2026-03-20 13:44:52.135073326 +0000 UTC m=+1338.424673279" watchObservedRunningTime="2026-03-20 13:44:52.140878181 +0000 UTC 
m=+1338.430478144" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.150439 4690 scope.go:117] "RemoveContainer" containerID="4d9b99253e553f43b512a97cc9c617f83e42f1a1449e0528efd23f006f78f359" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.423185 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="04b6b07b-9136-4036-a8a1-f048b6b41b44" path="/var/lib/kubelet/pods/04b6b07b-9136-4036-a8a1-f048b6b41b44/volumes" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.859976 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.982781 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-scripts\") pod \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983094 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-combined-ca-bundle\") pod \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983179 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-config-data\") pod \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983308 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983332 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-logs\") pod \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983353 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-internal-tls-certs\") pod \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983372 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-httpd-run\") pod \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983411 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f8m69\" (UniqueName: \"kubernetes.io/projected/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-kube-api-access-f8m69\") pod \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\" (UID: \"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c\") " Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983765 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-logs" (OuterVolumeSpecName: "logs") pod "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" (UID: "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.983900 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.984670 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" (UID: "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.985367 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.988949 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-scripts" (OuterVolumeSpecName: "scripts") pod "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" (UID: "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.994045 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-kube-api-access-f8m69" (OuterVolumeSpecName: "kube-api-access-f8m69") pod "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" (UID: "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c"). InnerVolumeSpecName "kube-api-access-f8m69". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:52 crc kubenswrapper[4690]: I0320 13:44:52.998041 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" (UID: "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.034509 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" (UID: "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.056789 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-config-data" (OuterVolumeSpecName: "config-data") pod "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" (UID: "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.060203 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" (UID: "78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.084700 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-sg-core-conf-yaml\") pod \"f72c59e7-4cfe-4ce2-8b13-a27673316486\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.084808 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zbkh\" (UniqueName: \"kubernetes.io/projected/f72c59e7-4cfe-4ce2-8b13-a27673316486-kube-api-access-9zbkh\") pod \"f72c59e7-4cfe-4ce2-8b13-a27673316486\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.084839 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-log-httpd\") pod \"f72c59e7-4cfe-4ce2-8b13-a27673316486\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.084879 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-scripts\") pod \"f72c59e7-4cfe-4ce2-8b13-a27673316486\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.084958 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-combined-ca-bundle\") pod \"f72c59e7-4cfe-4ce2-8b13-a27673316486\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.084994 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-run-httpd\") pod \"f72c59e7-4cfe-4ce2-8b13-a27673316486\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085126 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-config-data\") pod \"f72c59e7-4cfe-4ce2-8b13-a27673316486\" (UID: \"f72c59e7-4cfe-4ce2-8b13-a27673316486\") " Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085298 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "f72c59e7-4cfe-4ce2-8b13-a27673316486" (UID: "f72c59e7-4cfe-4ce2-8b13-a27673316486"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085572 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f8m69\" (UniqueName: \"kubernetes.io/projected/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-kube-api-access-f8m69\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085572 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "f72c59e7-4cfe-4ce2-8b13-a27673316486" (UID: "f72c59e7-4cfe-4ce2-8b13-a27673316486"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085592 4690 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085642 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085790 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085816 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085869 4690 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085883 4690 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.085892 4690 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c-httpd-run\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.088773 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f72c59e7-4cfe-4ce2-8b13-a27673316486-kube-api-access-9zbkh" (OuterVolumeSpecName: "kube-api-access-9zbkh") pod "f72c59e7-4cfe-4ce2-8b13-a27673316486" (UID: "f72c59e7-4cfe-4ce2-8b13-a27673316486"). InnerVolumeSpecName "kube-api-access-9zbkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.089593 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-scripts" (OuterVolumeSpecName: "scripts") pod "f72c59e7-4cfe-4ce2-8b13-a27673316486" (UID: "f72c59e7-4cfe-4ce2-8b13-a27673316486"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.108710 4690 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.110569 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "f72c59e7-4cfe-4ce2-8b13-a27673316486" (UID: "f72c59e7-4cfe-4ce2-8b13-a27673316486"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.127900 4690 generic.go:334] "Generic (PLEG): container finished" podID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerID="ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e" exitCode=0 Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.128167 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c","Type":"ContainerDied","Data":"ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e"} Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.128245 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c","Type":"ContainerDied","Data":"c131b8c03ae6bac2296276ccb47ffc69669081b74786d04928f1686974077ebf"} Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.128339 4690 scope.go:117] "RemoveContainer" containerID="ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.128365 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.136953 4690 generic.go:334] "Generic (PLEG): container finished" podID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerID="cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e" exitCode=0 Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.138534 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.139333 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerDied","Data":"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e"} Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.139371 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerDied","Data":"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba"} Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.144496 4690 generic.go:334] "Generic (PLEG): container finished" podID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerID="ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba" exitCode=2 Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.144559 4690 generic.go:334] "Generic (PLEG): container finished" podID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerID="54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c" exitCode=0 Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.144570 4690 generic.go:334] "Generic (PLEG): container finished" podID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerID="745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834" exitCode=0 Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.144702 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerDied","Data":"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c"} Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.144739 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerDied","Data":"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834"} Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.144753 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"f72c59e7-4cfe-4ce2-8b13-a27673316486","Type":"ContainerDied","Data":"1b959690e915233356d1c340b32a2d2c5405997a24baba5c6b92fc621d1d42af"} Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.150214 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4","Type":"ContainerStarted","Data":"ecff793241d2993a1169fed83398627d92b259644a8928bc6f0592ff050a6eaf"} Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.176479 4690 scope.go:117] "RemoveContainer" containerID="c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.187582 4690 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/f72c59e7-4cfe-4ce2-8b13-a27673316486-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.187624 4690 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.187637 4690 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-sg-core-conf-yaml\") on node \"crc\" 
DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.187650 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zbkh\" (UniqueName: \"kubernetes.io/projected/f72c59e7-4cfe-4ce2-8b13-a27673316486-kube-api-access-9zbkh\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.187664 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.195801 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.208863 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f72c59e7-4cfe-4ce2-8b13-a27673316486" (UID: "f72c59e7-4cfe-4ce2-8b13-a27673316486"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.213469 4690 scope.go:117] "RemoveContainer" containerID="ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.213599 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.213811 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e\": container with ID starting with ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e not found: ID does not exist" containerID="ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.213860 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e"} err="failed to get container status \"ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e\": rpc error: code = NotFound desc = could not find container \"ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e\": container with ID starting with ef2d8c015145c379ff0a56e22f04ec22a597dc0b9ca0f31cadd47684909f308e not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.213886 4690 scope.go:117] "RemoveContainer" containerID="c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.214663 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b\": container with ID starting with c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b not found: ID does not exist" containerID="c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.214685 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b"} err="failed to get container status \"c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b\": rpc 
error: code = NotFound desc = could not find container \"c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b\": container with ID starting with c12148f166a8a8f5ee9c0505277168f9001bf2482827e0c8486a2ae393f1ab8b not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.214700 4690 scope.go:117] "RemoveContainer" containerID="cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.222763 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.223469 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerName="glance-httpd" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223492 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerName="glance-httpd" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.223506 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="proxy-httpd" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223512 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="proxy-httpd" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.223525 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerName="glance-log" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223532 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerName="glance-log" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.223547 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="ceilometer-notification-agent" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223553 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="ceilometer-notification-agent" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.223564 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="sg-core" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223570 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="sg-core" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.223582 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="ceilometer-central-agent" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223588 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="ceilometer-central-agent" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223744 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="sg-core" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223755 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="ceilometer-notification-agent" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223771 4690 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerName="glance-log" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223778 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" containerName="glance-httpd" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223789 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="ceilometer-central-agent" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.223797 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" containerName="proxy-httpd" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.224754 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.230406 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.230668 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.232643 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.236027 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-config-data" (OuterVolumeSpecName: "config-data") pod "f72c59e7-4cfe-4ce2-8b13-a27673316486" (UID: "f72c59e7-4cfe-4ce2-8b13-a27673316486"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.282749 4690 scope.go:117] "RemoveContainer" containerID="ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.288601 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.288706 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8gv9\" (UniqueName: \"kubernetes.io/projected/ae12c55b-fd78-4068-bce5-44f82d474701-kube-api-access-d8gv9\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.288794 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.288823 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae12c55b-fd78-4068-bce5-44f82d474701-logs\") pod \"glance-default-internal-api-0\" (UID: 
\"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.288968 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.289002 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.289219 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.289255 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae12c55b-fd78-4068-bce5-44f82d474701-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.289380 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.289396 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f72c59e7-4cfe-4ce2-8b13-a27673316486-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.306621 4690 scope.go:117] "RemoveContainer" containerID="54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.323630 4690 scope.go:117] "RemoveContainer" containerID="745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.345153 4690 scope.go:117] "RemoveContainer" containerID="cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.345607 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": container with ID starting with cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e not found: ID does not exist" containerID="cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.345646 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e"} err="failed to get container status 
\"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": rpc error: code = NotFound desc = could not find container \"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": container with ID starting with cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.345680 4690 scope.go:117] "RemoveContainer" containerID="ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.346813 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": container with ID starting with ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba not found: ID does not exist" containerID="ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.346886 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba"} err="failed to get container status \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": rpc error: code = NotFound desc = could not find container \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": container with ID starting with ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.346927 4690 scope.go:117] "RemoveContainer" containerID="54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.347284 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": container with ID starting with 54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c not found: ID does not exist" containerID="54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.347335 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c"} err="failed to get container status \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": rpc error: code = NotFound desc = could not find container \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": container with ID starting with 54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.347381 4690 scope.go:117] "RemoveContainer" containerID="745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834" Mar 20 13:44:53 crc kubenswrapper[4690]: E0320 13:44:53.347716 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": container with ID starting with 745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834 not found: ID does not exist" containerID="745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.347740 4690 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834"} err="failed to get container status \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": rpc error: code = NotFound desc = could not find container \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": container with ID starting with 745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834 not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.347753 4690 scope.go:117] "RemoveContainer" containerID="cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.348103 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e"} err="failed to get container status \"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": rpc error: code = NotFound desc = could not find container \"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": container with ID starting with cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.348130 4690 scope.go:117] "RemoveContainer" containerID="ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.348352 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba"} err="failed to get container status \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": rpc error: code = NotFound desc = could not find container \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": container with ID starting with ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.348390 4690 scope.go:117] "RemoveContainer" containerID="54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.348635 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c"} err="failed to get container status \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": rpc error: code = NotFound desc = could not find container \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": container with ID starting with 54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.348653 4690 scope.go:117] "RemoveContainer" containerID="745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.348979 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834"} err="failed to get container status \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": rpc error: code = NotFound desc = could not find container \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": container with ID starting with 745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834 
not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.348999 4690 scope.go:117] "RemoveContainer" containerID="cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.349223 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e"} err="failed to get container status \"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": rpc error: code = NotFound desc = could not find container \"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": container with ID starting with cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.349246 4690 scope.go:117] "RemoveContainer" containerID="ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.349474 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba"} err="failed to get container status \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": rpc error: code = NotFound desc = could not find container \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": container with ID starting with ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.349495 4690 scope.go:117] "RemoveContainer" containerID="54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.349707 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c"} err="failed to get container status \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": rpc error: code = NotFound desc = could not find container \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": container with ID starting with 54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.349727 4690 scope.go:117] "RemoveContainer" containerID="745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.350206 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834"} err="failed to get container status \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": rpc error: code = NotFound desc = could not find container \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": container with ID starting with 745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834 not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.350246 4690 scope.go:117] "RemoveContainer" containerID="cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.350552 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e"} err="failed to get 
container status \"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": rpc error: code = NotFound desc = could not find container \"cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e\": container with ID starting with cde489879f416e0e0e0a0aab76563580daae65fe6da5bee5eded2fe3dc3fc14e not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.350575 4690 scope.go:117] "RemoveContainer" containerID="ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.350821 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba"} err="failed to get container status \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": rpc error: code = NotFound desc = could not find container \"ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba\": container with ID starting with ea18a184bffd321cdcdcbba1a9c2255652b372fdb072146fcf4aa424d3dff2ba not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.350890 4690 scope.go:117] "RemoveContainer" containerID="54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.351130 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c"} err="failed to get container status \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": rpc error: code = NotFound desc = could not find container \"54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c\": container with ID starting with 54621ef4e82ffff3bdc2ff21b69c0dfaee458b02f076b5ddb610b5ef9203e73c not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.351150 4690 scope.go:117] "RemoveContainer" containerID="745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.351417 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834"} err="failed to get container status \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": rpc error: code = NotFound desc = could not find container \"745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834\": container with ID starting with 745138abe0a440885427a87b373805b01b6a066da0410281212a1029d65e3834 not found: ID does not exist" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.390880 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8gv9\" (UniqueName: \"kubernetes.io/projected/ae12c55b-fd78-4068-bce5-44f82d474701-kube-api-access-d8gv9\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391005 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391043 4690 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae12c55b-fd78-4068-bce5-44f82d474701-logs\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391076 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391115 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391206 4690 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391506 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391575 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae12c55b-fd78-4068-bce5-44f82d474701-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391665 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391947 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ae12c55b-fd78-4068-bce5-44f82d474701-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.391528 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae12c55b-fd78-4068-bce5-44f82d474701-logs\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.395576 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.395796 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.396272 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.396321 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ae12c55b-fd78-4068-bce5-44f82d474701-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.408554 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8gv9\" (UniqueName: \"kubernetes.io/projected/ae12c55b-fd78-4068-bce5-44f82d474701-kube-api-access-d8gv9\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.431369 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ae12c55b-fd78-4068-bce5-44f82d474701\") " pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.479201 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.494022 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.505468 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.507464 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.515163 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.515921 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.518770 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.554651 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.594789 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-run-httpd\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.594840 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-scripts\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.594944 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.595001 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l48c8\" (UniqueName: \"kubernetes.io/projected/8f339415-4513-4705-865e-458eb3e25741-kube-api-access-l48c8\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.595070 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-config-data\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.595147 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.595191 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-log-httpd\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.696787 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-config-data\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.696941 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.696972 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" 
(UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-log-httpd\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.697527 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-log-httpd\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.697698 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-run-httpd\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.697777 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-scripts\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.697806 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.697888 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l48c8\" (UniqueName: \"kubernetes.io/projected/8f339415-4513-4705-865e-458eb3e25741-kube-api-access-l48c8\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.698084 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-run-httpd\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.712068 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.712269 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.712978 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-config-data\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.714451 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-scripts\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.721596 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l48c8\" (UniqueName: \"kubernetes.io/projected/8f339415-4513-4705-865e-458eb3e25741-kube-api-access-l48c8\") pod \"ceilometer-0\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " pod="openstack/ceilometer-0" Mar 20 13:44:53 crc kubenswrapper[4690]: I0320 13:44:53.825691 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:44:54 crc kubenswrapper[4690]: I0320 13:44:54.090827 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Mar 20 13:44:54 crc kubenswrapper[4690]: I0320 13:44:54.160726 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ae12c55b-fd78-4068-bce5-44f82d474701","Type":"ContainerStarted","Data":"3e2eff251f567a28d75a7aff4a5a3ce66db8f3df47736d9936a1819c2cd74cad"} Mar 20 13:44:54 crc kubenswrapper[4690]: I0320 13:44:54.162997 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"8a099cdf-48ab-4e3a-9d46-88d38d63bdc4","Type":"ContainerStarted","Data":"723468f9cf0a8d629a1917cc461f77aca23bc868e41c195300ce8b79545ebd1e"} Mar 20 13:44:54 crc kubenswrapper[4690]: I0320 13:44:54.191750 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=3.191729918 podStartE2EDuration="3.191729918s" podCreationTimestamp="2026-03-20 13:44:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:44:54.185573502 +0000 UTC m=+1340.475173435" watchObservedRunningTime="2026-03-20 13:44:54.191729918 +0000 UTC m=+1340.481329871" Mar 20 13:44:54 crc kubenswrapper[4690]: I0320 13:44:54.268603 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:44:54 crc kubenswrapper[4690]: W0320 13:44:54.278641 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8f339415_4513_4705_865e_458eb3e25741.slice/crio-eef3630ef08b68fec4163e4bdfa73bd4baacd33e6b2c329f9e7325a4d301697a WatchSource:0}: Error finding container eef3630ef08b68fec4163e4bdfa73bd4baacd33e6b2c329f9e7325a4d301697a: Status 404 returned error can't find the container with id eef3630ef08b68fec4163e4bdfa73bd4baacd33e6b2c329f9e7325a4d301697a Mar 20 13:44:54 crc kubenswrapper[4690]: I0320 13:44:54.424502 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c" path="/var/lib/kubelet/pods/78b4f9f6-8f8f-4fa8-9dd1-bfa228f3ad7c/volumes" Mar 20 13:44:54 crc kubenswrapper[4690]: I0320 13:44:54.425418 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f72c59e7-4cfe-4ce2-8b13-a27673316486" path="/var/lib/kubelet/pods/f72c59e7-4cfe-4ce2-8b13-a27673316486/volumes" Mar 20 13:44:55 crc kubenswrapper[4690]: I0320 13:44:55.182343 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"ae12c55b-fd78-4068-bce5-44f82d474701","Type":"ContainerStarted","Data":"0eaa74f71d0dd501ebb03422e560b37a3c20bcd00d9b2eba51f7a3b6a35e73e6"} Mar 20 13:44:55 crc kubenswrapper[4690]: I0320 13:44:55.185520 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerStarted","Data":"eef3630ef08b68fec4163e4bdfa73bd4baacd33e6b2c329f9e7325a4d301697a"} Mar 20 13:44:56 crc kubenswrapper[4690]: I0320 13:44:56.196340 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerStarted","Data":"992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f"} Mar 20 13:44:56 crc kubenswrapper[4690]: I0320 13:44:56.197006 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerStarted","Data":"b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13"} Mar 20 13:44:56 crc kubenswrapper[4690]: I0320 13:44:56.198255 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ae12c55b-fd78-4068-bce5-44f82d474701","Type":"ContainerStarted","Data":"2834e5cd95ba239c377d1a968ca218af3ed145b833a2a799fe6772bc835c6e3d"} Mar 20 13:44:56 crc kubenswrapper[4690]: I0320 13:44:56.265021 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.265003782 podStartE2EDuration="3.265003782s" podCreationTimestamp="2026-03-20 13:44:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:44:56.256458829 +0000 UTC m=+1342.546058772" watchObservedRunningTime="2026-03-20 13:44:56.265003782 +0000 UTC m=+1342.554603725" Mar 20 13:44:57 crc kubenswrapper[4690]: I0320 13:44:57.210313 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerStarted","Data":"1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1"} Mar 20 13:44:59 crc kubenswrapper[4690]: I0320 13:44:59.233880 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerStarted","Data":"6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53"} Mar 20 13:44:59 crc kubenswrapper[4690]: I0320 13:44:59.234588 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:44:59 crc kubenswrapper[4690]: I0320 13:44:59.236663 4690 generic.go:334] "Generic (PLEG): container finished" podID="8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" containerID="1d90b4a3dcc4853a2310f0bd033a3c18fd91aeb397d8ac19266e713093dc867b" exitCode=0 Mar 20 13:44:59 crc kubenswrapper[4690]: I0320 13:44:59.236743 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" event={"ID":"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880","Type":"ContainerDied","Data":"1d90b4a3dcc4853a2310f0bd033a3c18fd91aeb397d8ac19266e713093dc867b"} Mar 20 13:44:59 crc kubenswrapper[4690]: I0320 13:44:59.270537 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.058043026 podStartE2EDuration="6.270510558s" podCreationTimestamp="2026-03-20 13:44:53 +0000 UTC" 
firstStartedPulling="2026-03-20 13:44:54.282097445 +0000 UTC m=+1340.571697388" lastFinishedPulling="2026-03-20 13:44:58.494564977 +0000 UTC m=+1344.784164920" observedRunningTime="2026-03-20 13:44:59.266590756 +0000 UTC m=+1345.556190699" watchObservedRunningTime="2026-03-20 13:44:59.270510558 +0000 UTC m=+1345.560110531" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.161198 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh"] Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.162517 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.166088 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.166530 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.182197 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh"] Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.230133 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7v6xl\" (UniqueName: \"kubernetes.io/projected/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-kube-api-access-7v6xl\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.230197 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-secret-volume\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.230408 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-config-volume\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.332731 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7v6xl\" (UniqueName: \"kubernetes.io/projected/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-kube-api-access-7v6xl\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.332819 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-secret-volume\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: 
I0320 13:45:00.332958 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-config-volume\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.335093 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-config-volume\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.342134 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-secret-volume\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.365544 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7v6xl\" (UniqueName: \"kubernetes.io/projected/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-kube-api-access-7v6xl\") pod \"collect-profiles-29566905-svfxh\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.494658 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.633331 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.738108 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-scripts\") pod \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.738166 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-config-data\") pod \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.738297 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-combined-ca-bundle\") pod \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.738399 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-66g97\" (UniqueName: \"kubernetes.io/projected/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-kube-api-access-66g97\") pod \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\" (UID: \"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880\") " Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.745801 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-kube-api-access-66g97" (OuterVolumeSpecName: "kube-api-access-66g97") pod "8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" (UID: "8ecaabd8-5cb5-4e0f-b5c8-c73075e68880"). InnerVolumeSpecName "kube-api-access-66g97". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.754628 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-scripts" (OuterVolumeSpecName: "scripts") pod "8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" (UID: "8ecaabd8-5cb5-4e0f-b5c8-c73075e68880"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.772503 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" (UID: "8ecaabd8-5cb5-4e0f-b5c8-c73075e68880"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.782651 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-config-data" (OuterVolumeSpecName: "config-data") pod "8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" (UID: "8ecaabd8-5cb5-4e0f-b5c8-c73075e68880"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.840282 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.840311 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-66g97\" (UniqueName: \"kubernetes.io/projected/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-kube-api-access-66g97\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.840322 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.840331 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:00 crc kubenswrapper[4690]: I0320 13:45:00.943096 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh"] Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.267641 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.267923 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fvfgz" event={"ID":"8ecaabd8-5cb5-4e0f-b5c8-c73075e68880","Type":"ContainerDied","Data":"6e0ceb6ffeb9f26c3293f75a2f69980fb642e229a8c4abe4daa8505765e99fd5"} Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.268107 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e0ceb6ffeb9f26c3293f75a2f69980fb642e229a8c4abe4daa8505765e99fd5" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.269811 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" event={"ID":"8a5a51b4-6d44-4909-a83e-3f9a4dffd406","Type":"ContainerStarted","Data":"6c864ea9e0a9a8b6d5ba5724121944e7894e5d1e7131ed381131525a00c3438e"} Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.269892 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" event={"ID":"8a5a51b4-6d44-4909-a83e-3f9a4dffd406","Type":"ContainerStarted","Data":"f93d3a1d4cf42210fe7778ea301ae940d5481696dd8d6abb311b79182d971ae8"} Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.323189 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" podStartSLOduration=1.323162116 podStartE2EDuration="1.323162116s" podCreationTimestamp="2026-03-20 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:01.307997533 +0000 UTC m=+1347.597597516" watchObservedRunningTime="2026-03-20 13:45:01.323162116 +0000 UTC m=+1347.612762089" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.464089 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Mar 20 13:45:01 crc 
kubenswrapper[4690]: I0320 13:45:01.464145 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.514244 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Mar 20 13:45:01 crc kubenswrapper[4690]: E0320 13:45:01.515469 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" containerName="nova-cell0-conductor-db-sync" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.515489 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" containerName="nova-cell0-conductor-db-sync" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.515706 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" containerName="nova-cell0-conductor-db-sync" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.516247 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.516826 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.520087 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.522015 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.525268 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-m8lsr" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.527492 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.657935 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdnxn\" (UniqueName: \"kubernetes.io/projected/4d06de43-2f21-4b70-8a38-9d7dbf386ada-kube-api-access-qdnxn\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.657972 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d06de43-2f21-4b70-8a38-9d7dbf386ada-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.658085 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d06de43-2f21-4b70-8a38-9d7dbf386ada-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.759994 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d06de43-2f21-4b70-8a38-9d7dbf386ada-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " 
pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.760094 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdnxn\" (UniqueName: \"kubernetes.io/projected/4d06de43-2f21-4b70-8a38-9d7dbf386ada-kube-api-access-qdnxn\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.760124 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d06de43-2f21-4b70-8a38-9d7dbf386ada-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.769816 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d06de43-2f21-4b70-8a38-9d7dbf386ada-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.782114 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d06de43-2f21-4b70-8a38-9d7dbf386ada-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.784553 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdnxn\" (UniqueName: \"kubernetes.io/projected/4d06de43-2f21-4b70-8a38-9d7dbf386ada-kube-api-access-qdnxn\") pod \"nova-cell0-conductor-0\" (UID: \"4d06de43-2f21-4b70-8a38-9d7dbf386ada\") " pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:01 crc kubenswrapper[4690]: I0320 13:45:01.837414 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:02 crc kubenswrapper[4690]: I0320 13:45:02.290507 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" event={"ID":"8a5a51b4-6d44-4909-a83e-3f9a4dffd406","Type":"ContainerDied","Data":"6c864ea9e0a9a8b6d5ba5724121944e7894e5d1e7131ed381131525a00c3438e"} Mar 20 13:45:02 crc kubenswrapper[4690]: I0320 13:45:02.300956 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Mar 20 13:45:02 crc kubenswrapper[4690]: I0320 13:45:02.290958 4690 generic.go:334] "Generic (PLEG): container finished" podID="8a5a51b4-6d44-4909-a83e-3f9a4dffd406" containerID="6c864ea9e0a9a8b6d5ba5724121944e7894e5d1e7131ed381131525a00c3438e" exitCode=0 Mar 20 13:45:02 crc kubenswrapper[4690]: I0320 13:45:02.302350 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Mar 20 13:45:02 crc kubenswrapper[4690]: I0320 13:45:02.302577 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Mar 20 13:45:02 crc kubenswrapper[4690]: W0320 13:45:02.314293 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4d06de43_2f21_4b70_8a38_9d7dbf386ada.slice/crio-b1ee3909d484fe199612d331cf82908bcd19fa5bbb86582caf18c07c40b30ce3 WatchSource:0}: Error finding container b1ee3909d484fe199612d331cf82908bcd19fa5bbb86582caf18c07c40b30ce3: Status 404 returned error can't find the container with id b1ee3909d484fe199612d331cf82908bcd19fa5bbb86582caf18c07c40b30ce3 Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.329063 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4d06de43-2f21-4b70-8a38-9d7dbf386ada","Type":"ContainerStarted","Data":"b8ab60072ace3c30a279c6bf3f1d82b253382fff754cbec4304c9d2b322baac4"} Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.329720 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"4d06de43-2f21-4b70-8a38-9d7dbf386ada","Type":"ContainerStarted","Data":"b1ee3909d484fe199612d331cf82908bcd19fa5bbb86582caf18c07c40b30ce3"} Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.359764 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.359741154 podStartE2EDuration="2.359741154s" podCreationTimestamp="2026-03-20 13:45:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:03.346579838 +0000 UTC m=+1349.636179801" watchObservedRunningTime="2026-03-20 13:45:03.359741154 +0000 UTC m=+1349.649341107" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.555905 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.556115 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.595560 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.631100 4690 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.707785 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.814686 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-secret-volume\") pod \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.815078 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-config-volume\") pod \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.815188 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7v6xl\" (UniqueName: \"kubernetes.io/projected/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-kube-api-access-7v6xl\") pod \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\" (UID: \"8a5a51b4-6d44-4909-a83e-3f9a4dffd406\") " Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.817006 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-config-volume" (OuterVolumeSpecName: "config-volume") pod "8a5a51b4-6d44-4909-a83e-3f9a4dffd406" (UID: "8a5a51b4-6d44-4909-a83e-3f9a4dffd406"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.824051 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8a5a51b4-6d44-4909-a83e-3f9a4dffd406" (UID: "8a5a51b4-6d44-4909-a83e-3f9a4dffd406"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.824192 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-kube-api-access-7v6xl" (OuterVolumeSpecName: "kube-api-access-7v6xl") pod "8a5a51b4-6d44-4909-a83e-3f9a4dffd406" (UID: "8a5a51b4-6d44-4909-a83e-3f9a4dffd406"). InnerVolumeSpecName "kube-api-access-7v6xl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.829572 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.829639 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.829679 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.830353 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"61fd0b68cc3ec6d77f02280694bc855224d1387694bd1b6f59471ac008b5cb66"} pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.830402 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" containerID="cri-o://61fd0b68cc3ec6d77f02280694bc855224d1387694bd1b6f59471ac008b5cb66" gracePeriod=600 Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.918288 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7v6xl\" (UniqueName: \"kubernetes.io/projected/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-kube-api-access-7v6xl\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.918918 4690 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-secret-volume\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:03 crc kubenswrapper[4690]: I0320 13:45:03.919021 4690 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8a5a51b4-6d44-4909-a83e-3f9a4dffd406-config-volume\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.135754 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.146520 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.340494 4690 generic.go:334] "Generic (PLEG): container finished" podID="60ded650-b298-4115-8286-8969b94d4062" containerID="61fd0b68cc3ec6d77f02280694bc855224d1387694bd1b6f59471ac008b5cb66" exitCode=0 Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.340557 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" 
event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerDied","Data":"61fd0b68cc3ec6d77f02280694bc855224d1387694bd1b6f59471ac008b5cb66"} Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.340588 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"5e696252e251066c1296443f70dfdb2d4815582a27f8d0fb1a1a2dd90457b26f"} Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.340609 4690 scope.go:117] "RemoveContainer" containerID="1cbfcd3ff515926ff968741597036e24520ad18c5c3213b253b31335bc1c23e8" Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.343774 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.343966 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29566905-svfxh" event={"ID":"8a5a51b4-6d44-4909-a83e-3f9a4dffd406","Type":"ContainerDied","Data":"f93d3a1d4cf42210fe7778ea301ae940d5481696dd8d6abb311b79182d971ae8"} Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.343997 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f93d3a1d4cf42210fe7778ea301ae940d5481696dd8d6abb311b79182d971ae8" Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.345818 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.345877 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Mar 20 13:45:04 crc kubenswrapper[4690]: I0320 13:45:04.345891 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Mar 20 13:45:06 crc kubenswrapper[4690]: I0320 13:45:06.222738 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Mar 20 13:45:06 crc kubenswrapper[4690]: I0320 13:45:06.225563 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Mar 20 13:45:11 crc kubenswrapper[4690]: I0320 13:45:11.885178 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.396292 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-sb7x2"] Mar 20 13:45:12 crc kubenswrapper[4690]: E0320 13:45:12.396749 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a5a51b4-6d44-4909-a83e-3f9a4dffd406" containerName="collect-profiles" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.396768 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a5a51b4-6d44-4909-a83e-3f9a4dffd406" containerName="collect-profiles" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.397065 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a5a51b4-6d44-4909-a83e-3f9a4dffd406" containerName="collect-profiles" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.397770 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.402939 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.404052 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.409916 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-sb7x2"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.502862 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-scripts\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.502924 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-config-data\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.502969 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.503028 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsbs9\" (UniqueName: \"kubernetes.io/projected/69168ed4-2cdf-4be8-8ae0-917d89a54670-kube-api-access-fsbs9\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.582504 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.584560 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.588839 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.597216 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.605997 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-scripts\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.606059 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-config-data\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.606098 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.606136 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsbs9\" (UniqueName: \"kubernetes.io/projected/69168ed4-2cdf-4be8-8ae0-917d89a54670-kube-api-access-fsbs9\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.616442 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-config-data\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.618928 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-scripts\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.621591 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.627791 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.629446 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.646654 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.668672 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsbs9\" (UniqueName: \"kubernetes.io/projected/69168ed4-2cdf-4be8-8ae0-917d89a54670-kube-api-access-fsbs9\") pod \"nova-cell0-cell-mapping-sb7x2\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.670695 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.715880 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-config-data\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.715929 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd53669d-e52f-4802-993c-5333dec1949d-logs\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.715968 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg22p\" (UniqueName: \"kubernetes.io/projected/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-kube-api-access-xg22p\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.715993 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hxp64\" (UniqueName: \"kubernetes.io/projected/cd53669d-e52f-4802-993c-5333dec1949d-kube-api-access-hxp64\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.716015 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.716069 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-config-data\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.716083 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.718411 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.728503 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.730058 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.733943 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.735836 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818128 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818222 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e58b84ab-69a5-4737-afda-6293d71a1daf-logs\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818245 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-config-data\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818273 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818295 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-config-data\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818309 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818391 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb7d7\" (UniqueName: \"kubernetes.io/projected/e58b84ab-69a5-4737-afda-6293d71a1daf-kube-api-access-pb7d7\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818408 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-config-data\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818441 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd53669d-e52f-4802-993c-5333dec1949d-logs\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818477 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg22p\" (UniqueName: \"kubernetes.io/projected/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-kube-api-access-xg22p\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.818514 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hxp64\" (UniqueName: \"kubernetes.io/projected/cd53669d-e52f-4802-993c-5333dec1949d-kube-api-access-hxp64\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.821420 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd53669d-e52f-4802-993c-5333dec1949d-logs\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.832285 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.835453 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-config-data\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.835982 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.839750 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.840830 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.843391 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.847941 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-config-data\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.860320 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hxp64\" (UniqueName: \"kubernetes.io/projected/cd53669d-e52f-4802-993c-5333dec1949d-kube-api-access-hxp64\") pod \"nova-api-0\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.866122 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.868612 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg22p\" (UniqueName: \"kubernetes.io/projected/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-kube-api-access-xg22p\") pod \"nova-scheduler-0\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.892026 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.892409 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-wq9qf"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.893777 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.905367 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-wq9qf"] Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.946434 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.973254 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.973424 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e58b84ab-69a5-4737-afda-6293d71a1daf-logs\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.973449 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-config-data\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.973471 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.973590 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.973678 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bpdqx\" (UniqueName: \"kubernetes.io/projected/d9011473-96a7-40eb-ab1b-5cf0758991e3-kube-api-access-bpdqx\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.973706 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb7d7\" (UniqueName: \"kubernetes.io/projected/e58b84ab-69a5-4737-afda-6293d71a1daf-kube-api-access-pb7d7\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.981779 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e58b84ab-69a5-4737-afda-6293d71a1daf-logs\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.984287 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-config-data\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:12 crc kubenswrapper[4690]: I0320 13:45:12.996678 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb7d7\" (UniqueName: 
\"kubernetes.io/projected/e58b84ab-69a5-4737-afda-6293d71a1daf-kube-api-access-pb7d7\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.027639 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " pod="openstack/nova-metadata-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082257 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-98wsp\" (UniqueName: \"kubernetes.io/projected/241eb71a-d9c7-4281-a71c-ed845166f03c-kube-api-access-98wsp\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082305 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082349 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082389 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bpdqx\" (UniqueName: \"kubernetes.io/projected/d9011473-96a7-40eb-ab1b-5cf0758991e3-kube-api-access-bpdqx\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082409 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-svc\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082469 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-config\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082496 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082521 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.082539 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.095787 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.099604 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.106164 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bpdqx\" (UniqueName: \"kubernetes.io/projected/d9011473-96a7-40eb-ab1b-5cf0758991e3-kube-api-access-bpdqx\") pod \"nova-cell1-novncproxy-0\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.191493 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.191529 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.191588 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98wsp\" (UniqueName: \"kubernetes.io/projected/241eb71a-d9c7-4281-a71c-ed845166f03c-kube-api-access-98wsp\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.191611 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.191668 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-svc\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.191724 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-config\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.192307 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-config\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.192854 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-nb\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.192307 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-swift-storage-0\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.193323 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-sb\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.193359 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-svc\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.208282 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.214740 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98wsp\" (UniqueName: \"kubernetes.io/projected/241eb71a-d9c7-4281-a71c-ed845166f03c-kube-api-access-98wsp\") pod \"dnsmasq-dns-bccf8f775-wq9qf\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.253549 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.291374 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.382969 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-sb7x2"] Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.473028 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-sb7x2" event={"ID":"69168ed4-2cdf-4be8-8ae0-917d89a54670","Type":"ContainerStarted","Data":"a70aedc7ebd7513bfcfe45ab67fb1e342007f8d4cbd25927a17402babe51ca84"} Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.503676 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.519120 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:13 crc kubenswrapper[4690]: W0320 13:45:13.539765 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c9ded12_3b6b_4aa1_9320_5bd58c7c8097.slice/crio-106a162122e6b4647fb66a44a827909992fa00aecf621f379c1808bb2d775ab1 WatchSource:0}: Error finding container 106a162122e6b4647fb66a44a827909992fa00aecf621f379c1808bb2d775ab1: Status 404 returned error can't find the container with id 106a162122e6b4647fb66a44a827909992fa00aecf621f379c1808bb2d775ab1 Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.554926 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pr965"] Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.556232 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.558551 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.558691 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.574563 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pr965"] Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.701878 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-config-data\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.702208 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-scripts\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.702236 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2z7wg\" (UniqueName: \"kubernetes.io/projected/f50d733d-5439-49fc-af1b-bb36c5b3c739-kube-api-access-2z7wg\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: 
I0320 13:45:13.702256 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.738951 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.805165 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-scripts\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.805221 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2z7wg\" (UniqueName: \"kubernetes.io/projected/f50d733d-5439-49fc-af1b-bb36c5b3c739-kube-api-access-2z7wg\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.805246 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.805337 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-config-data\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.810985 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-config-data\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.811415 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.812929 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-scripts\") pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.822576 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2z7wg\" (UniqueName: \"kubernetes.io/projected/f50d733d-5439-49fc-af1b-bb36c5b3c739-kube-api-access-2z7wg\") 
pod \"nova-cell1-conductor-db-sync-pr965\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.891181 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:13 crc kubenswrapper[4690]: W0320 13:45:13.896687 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9011473_96a7_40eb_ab1b_5cf0758991e3.slice/crio-53c8946a2c440e153003b853f4e7d5285c8365f5f2abe3349dcfce72d15dd67c WatchSource:0}: Error finding container 53c8946a2c440e153003b853f4e7d5285c8365f5f2abe3349dcfce72d15dd67c: Status 404 returned error can't find the container with id 53c8946a2c440e153003b853f4e7d5285c8365f5f2abe3349dcfce72d15dd67c Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.912595 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:13 crc kubenswrapper[4690]: I0320 13:45:13.985276 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-wq9qf"] Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.375730 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pr965"] Mar 20 13:45:14 crc kubenswrapper[4690]: W0320 13:45:14.386789 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf50d733d_5439_49fc_af1b_bb36c5b3c739.slice/crio-970057bbee861539270bee85d36d36bedb5b6581443325f2427d7e02b9f1cee2 WatchSource:0}: Error finding container 970057bbee861539270bee85d36d36bedb5b6581443325f2427d7e02b9f1cee2: Status 404 returned error can't find the container with id 970057bbee861539270bee85d36d36bedb5b6581443325f2427d7e02b9f1cee2 Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.494558 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097","Type":"ContainerStarted","Data":"106a162122e6b4647fb66a44a827909992fa00aecf621f379c1808bb2d775ab1"} Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.496230 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-sb7x2" event={"ID":"69168ed4-2cdf-4be8-8ae0-917d89a54670","Type":"ContainerStarted","Data":"f3edb3590e62d270cce5c3faa54306f716894ae1c8e4f9c6f5670ef8c23020e4"} Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.499361 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd53669d-e52f-4802-993c-5333dec1949d","Type":"ContainerStarted","Data":"92f18aa674afbefaf3052418831d8971004e92beb4c8bc068a63c6beb2126751"} Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.501407 4690 generic.go:334] "Generic (PLEG): container finished" podID="241eb71a-d9c7-4281-a71c-ed845166f03c" containerID="583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e" exitCode=0 Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.501462 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" event={"ID":"241eb71a-d9c7-4281-a71c-ed845166f03c","Type":"ContainerDied","Data":"583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e"} Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.501507 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" 
event={"ID":"241eb71a-d9c7-4281-a71c-ed845166f03c","Type":"ContainerStarted","Data":"e746e26c02573aa486fd56cf9d5295bf689e22d6192d632921c511c814cacdc4"} Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.516269 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e58b84ab-69a5-4737-afda-6293d71a1daf","Type":"ContainerStarted","Data":"cb97f28e871491c4dc33d7cbf172b5ed8c0c6390f852da91676edad303f2545a"} Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.525171 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d9011473-96a7-40eb-ab1b-5cf0758991e3","Type":"ContainerStarted","Data":"53c8946a2c440e153003b853f4e7d5285c8365f5f2abe3349dcfce72d15dd67c"} Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.526757 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pr965" event={"ID":"f50d733d-5439-49fc-af1b-bb36c5b3c739","Type":"ContainerStarted","Data":"970057bbee861539270bee85d36d36bedb5b6581443325f2427d7e02b9f1cee2"} Mar 20 13:45:14 crc kubenswrapper[4690]: I0320 13:45:14.605588 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-sb7x2" podStartSLOduration=2.605571705 podStartE2EDuration="2.605571705s" podCreationTimestamp="2026-03-20 13:45:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:14.587199421 +0000 UTC m=+1360.876799374" watchObservedRunningTime="2026-03-20 13:45:14.605571705 +0000 UTC m=+1360.895171648" Mar 20 13:45:15 crc kubenswrapper[4690]: I0320 13:45:15.555914 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pr965" event={"ID":"f50d733d-5439-49fc-af1b-bb36c5b3c739","Type":"ContainerStarted","Data":"7c70b70602baf750e77d197fee8c81b82c32f4e3bb0697171ced29df5fe816d9"} Mar 20 13:45:15 crc kubenswrapper[4690]: I0320 13:45:15.590401 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-pr965" podStartSLOduration=2.590382115 podStartE2EDuration="2.590382115s" podCreationTimestamp="2026-03-20 13:45:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:15.585170506 +0000 UTC m=+1361.874770449" watchObservedRunningTime="2026-03-20 13:45:15.590382115 +0000 UTC m=+1361.879982058" Mar 20 13:45:16 crc kubenswrapper[4690]: I0320 13:45:16.398150 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:16 crc kubenswrapper[4690]: I0320 13:45:16.424248 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.589364 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097","Type":"ContainerStarted","Data":"7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8"} Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.600271 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd53669d-e52f-4802-993c-5333dec1949d","Type":"ContainerStarted","Data":"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945"} Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.600716 4690 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd53669d-e52f-4802-993c-5333dec1949d","Type":"ContainerStarted","Data":"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38"} Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.608064 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" event={"ID":"241eb71a-d9c7-4281-a71c-ed845166f03c","Type":"ContainerStarted","Data":"864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526"} Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.608291 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.611089 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e58b84ab-69a5-4737-afda-6293d71a1daf","Type":"ContainerStarted","Data":"07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def"} Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.611257 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e58b84ab-69a5-4737-afda-6293d71a1daf","Type":"ContainerStarted","Data":"a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d"} Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.611513 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerName="nova-metadata-log" containerID="cri-o://a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d" gracePeriod=30 Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.612008 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerName="nova-metadata-metadata" containerID="cri-o://07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def" gracePeriod=30 Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.623098 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d9011473-96a7-40eb-ab1b-5cf0758991e3","Type":"ContainerStarted","Data":"8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c"} Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.623250 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="d9011473-96a7-40eb-ab1b-5cf0758991e3" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c" gracePeriod=30 Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.628641 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.304220609 podStartE2EDuration="6.628618853s" podCreationTimestamp="2026-03-20 13:45:12 +0000 UTC" firstStartedPulling="2026-03-20 13:45:13.544611193 +0000 UTC m=+1359.834211136" lastFinishedPulling="2026-03-20 13:45:17.869009397 +0000 UTC m=+1364.158609380" observedRunningTime="2026-03-20 13:45:18.610046903 +0000 UTC m=+1364.899646866" watchObservedRunningTime="2026-03-20 13:45:18.628618853 +0000 UTC m=+1364.918218806" Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.646982 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.290960631 podStartE2EDuration="6.646958106s" 
podCreationTimestamp="2026-03-20 13:45:12 +0000 UTC" firstStartedPulling="2026-03-20 13:45:13.516069989 +0000 UTC m=+1359.805669932" lastFinishedPulling="2026-03-20 13:45:17.872067454 +0000 UTC m=+1364.161667407" observedRunningTime="2026-03-20 13:45:18.631890396 +0000 UTC m=+1364.921490329" watchObservedRunningTime="2026-03-20 13:45:18.646958106 +0000 UTC m=+1364.936558039" Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.652414 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" podStartSLOduration=6.652394271 podStartE2EDuration="6.652394271s" podCreationTimestamp="2026-03-20 13:45:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:18.649588961 +0000 UTC m=+1364.939188904" watchObservedRunningTime="2026-03-20 13:45:18.652394271 +0000 UTC m=+1364.941994214" Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.684504 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.551481722 podStartE2EDuration="6.684489677s" podCreationTimestamp="2026-03-20 13:45:12 +0000 UTC" firstStartedPulling="2026-03-20 13:45:13.753395308 +0000 UTC m=+1360.042995251" lastFinishedPulling="2026-03-20 13:45:17.886403243 +0000 UTC m=+1364.176003206" observedRunningTime="2026-03-20 13:45:18.681301636 +0000 UTC m=+1364.970901579" watchObservedRunningTime="2026-03-20 13:45:18.684489677 +0000 UTC m=+1364.974089620" Mar 20 13:45:18 crc kubenswrapper[4690]: I0320 13:45:18.712640 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.76000815 podStartE2EDuration="6.712622189s" podCreationTimestamp="2026-03-20 13:45:12 +0000 UTC" firstStartedPulling="2026-03-20 13:45:13.916986315 +0000 UTC m=+1360.206586258" lastFinishedPulling="2026-03-20 13:45:17.869600354 +0000 UTC m=+1364.159200297" observedRunningTime="2026-03-20 13:45:18.703145649 +0000 UTC m=+1364.992745592" watchObservedRunningTime="2026-03-20 13:45:18.712622189 +0000 UTC m=+1365.002222122" Mar 20 13:45:19 crc kubenswrapper[4690]: I0320 13:45:19.637936 4690 generic.go:334] "Generic (PLEG): container finished" podID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerID="a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d" exitCode=143 Mar 20 13:45:19 crc kubenswrapper[4690]: I0320 13:45:19.638015 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e58b84ab-69a5-4737-afda-6293d71a1daf","Type":"ContainerDied","Data":"a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d"} Mar 20 13:45:20 crc kubenswrapper[4690]: I0320 13:45:20.650493 4690 generic.go:334] "Generic (PLEG): container finished" podID="69168ed4-2cdf-4be8-8ae0-917d89a54670" containerID="f3edb3590e62d270cce5c3faa54306f716894ae1c8e4f9c6f5670ef8c23020e4" exitCode=0 Mar 20 13:45:20 crc kubenswrapper[4690]: I0320 13:45:20.650533 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-sb7x2" event={"ID":"69168ed4-2cdf-4be8-8ae0-917d89a54670","Type":"ContainerDied","Data":"f3edb3590e62d270cce5c3faa54306f716894ae1c8e4f9c6f5670ef8c23020e4"} Mar 20 13:45:21 crc kubenswrapper[4690]: I0320 13:45:21.665114 4690 generic.go:334] "Generic (PLEG): container finished" podID="f50d733d-5439-49fc-af1b-bb36c5b3c739" containerID="7c70b70602baf750e77d197fee8c81b82c32f4e3bb0697171ced29df5fe816d9" 
exitCode=0 Mar 20 13:45:21 crc kubenswrapper[4690]: I0320 13:45:21.665166 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pr965" event={"ID":"f50d733d-5439-49fc-af1b-bb36c5b3c739","Type":"ContainerDied","Data":"7c70b70602baf750e77d197fee8c81b82c32f4e3bb0697171ced29df5fe816d9"} Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.103762 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.292129 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-scripts\") pod \"69168ed4-2cdf-4be8-8ae0-917d89a54670\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.294810 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsbs9\" (UniqueName: \"kubernetes.io/projected/69168ed4-2cdf-4be8-8ae0-917d89a54670-kube-api-access-fsbs9\") pod \"69168ed4-2cdf-4be8-8ae0-917d89a54670\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.294990 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-config-data\") pod \"69168ed4-2cdf-4be8-8ae0-917d89a54670\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.295056 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-combined-ca-bundle\") pod \"69168ed4-2cdf-4be8-8ae0-917d89a54670\" (UID: \"69168ed4-2cdf-4be8-8ae0-917d89a54670\") " Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.300371 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69168ed4-2cdf-4be8-8ae0-917d89a54670-kube-api-access-fsbs9" (OuterVolumeSpecName: "kube-api-access-fsbs9") pod "69168ed4-2cdf-4be8-8ae0-917d89a54670" (UID: "69168ed4-2cdf-4be8-8ae0-917d89a54670"). InnerVolumeSpecName "kube-api-access-fsbs9". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.301840 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-scripts" (OuterVolumeSpecName: "scripts") pod "69168ed4-2cdf-4be8-8ae0-917d89a54670" (UID: "69168ed4-2cdf-4be8-8ae0-917d89a54670"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.320526 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-config-data" (OuterVolumeSpecName: "config-data") pod "69168ed4-2cdf-4be8-8ae0-917d89a54670" (UID: "69168ed4-2cdf-4be8-8ae0-917d89a54670"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.337483 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "69168ed4-2cdf-4be8-8ae0-917d89a54670" (UID: "69168ed4-2cdf-4be8-8ae0-917d89a54670"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.397290 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.397601 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.397815 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsbs9\" (UniqueName: \"kubernetes.io/projected/69168ed4-2cdf-4be8-8ae0-917d89a54670-kube-api-access-fsbs9\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.398034 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/69168ed4-2cdf-4be8-8ae0-917d89a54670-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.692747 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-sb7x2" event={"ID":"69168ed4-2cdf-4be8-8ae0-917d89a54670","Type":"ContainerDied","Data":"a70aedc7ebd7513bfcfe45ab67fb1e342007f8d4cbd25927a17402babe51ca84"} Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.692792 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a70aedc7ebd7513bfcfe45ab67fb1e342007f8d4cbd25927a17402babe51ca84" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.692884 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-sb7x2" Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.872123 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.872469 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cd53669d-e52f-4802-993c-5333dec1949d" containerName="nova-api-log" containerID="cri-o://8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38" gracePeriod=30 Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.872792 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="cd53669d-e52f-4802-993c-5333dec1949d" containerName="nova-api-api" containerID="cri-o://33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945" gracePeriod=30 Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.888331 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.888527 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" containerName="nova-scheduler-scheduler" containerID="cri-o://7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8" gracePeriod=30 Mar 20 13:45:22 crc kubenswrapper[4690]: I0320 13:45:22.892548 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.187924 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.218897 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2z7wg\" (UniqueName: \"kubernetes.io/projected/f50d733d-5439-49fc-af1b-bb36c5b3c739-kube-api-access-2z7wg\") pod \"f50d733d-5439-49fc-af1b-bb36c5b3c739\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.218970 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-config-data\") pod \"f50d733d-5439-49fc-af1b-bb36c5b3c739\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.219026 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-combined-ca-bundle\") pod \"f50d733d-5439-49fc-af1b-bb36c5b3c739\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.219057 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-scripts\") pod \"f50d733d-5439-49fc-af1b-bb36c5b3c739\" (UID: \"f50d733d-5439-49fc-af1b-bb36c5b3c739\") " Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.232125 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-scripts" (OuterVolumeSpecName: "scripts") pod "f50d733d-5439-49fc-af1b-bb36c5b3c739" (UID: "f50d733d-5439-49fc-af1b-bb36c5b3c739"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.236123 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f50d733d-5439-49fc-af1b-bb36c5b3c739-kube-api-access-2z7wg" (OuterVolumeSpecName: "kube-api-access-2z7wg") pod "f50d733d-5439-49fc-af1b-bb36c5b3c739" (UID: "f50d733d-5439-49fc-af1b-bb36c5b3c739"). InnerVolumeSpecName "kube-api-access-2z7wg". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.253139 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-config-data" (OuterVolumeSpecName: "config-data") pod "f50d733d-5439-49fc-af1b-bb36c5b3c739" (UID: "f50d733d-5439-49fc-af1b-bb36c5b3c739"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.254562 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.255796 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f50d733d-5439-49fc-af1b-bb36c5b3c739" (UID: "f50d733d-5439-49fc-af1b-bb36c5b3c739"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.294206 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.325967 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.326008 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2z7wg\" (UniqueName: \"kubernetes.io/projected/f50d733d-5439-49fc-af1b-bb36c5b3c739-kube-api-access-2z7wg\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.326018 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.326028 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f50d733d-5439-49fc-af1b-bb36c5b3c739-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.369960 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-n9p8r"] Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.370209 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" podUID="35385d42-f164-4605-8a55-290d5acc5192" containerName="dnsmasq-dns" containerID="cri-o://9aeb5088c565fdb27201256af0e2df1079d1ca094ff0a39688814c0f027a9bac" gracePeriod=10 Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.483209 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.529429 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd53669d-e52f-4802-993c-5333dec1949d-logs\") pod \"cd53669d-e52f-4802-993c-5333dec1949d\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.529709 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-combined-ca-bundle\") pod \"cd53669d-e52f-4802-993c-5333dec1949d\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.529790 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-config-data\") pod \"cd53669d-e52f-4802-993c-5333dec1949d\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.529840 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hxp64\" (UniqueName: \"kubernetes.io/projected/cd53669d-e52f-4802-993c-5333dec1949d-kube-api-access-hxp64\") pod \"cd53669d-e52f-4802-993c-5333dec1949d\" (UID: \"cd53669d-e52f-4802-993c-5333dec1949d\") " Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.530342 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd53669d-e52f-4802-993c-5333dec1949d-logs" (OuterVolumeSpecName: "logs") pod "cd53669d-e52f-4802-993c-5333dec1949d" (UID: "cd53669d-e52f-4802-993c-5333dec1949d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.532093 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd53669d-e52f-4802-993c-5333dec1949d-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.539583 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd53669d-e52f-4802-993c-5333dec1949d-kube-api-access-hxp64" (OuterVolumeSpecName: "kube-api-access-hxp64") pod "cd53669d-e52f-4802-993c-5333dec1949d" (UID: "cd53669d-e52f-4802-993c-5333dec1949d"). InnerVolumeSpecName "kube-api-access-hxp64". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.562914 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-config-data" (OuterVolumeSpecName: "config-data") pod "cd53669d-e52f-4802-993c-5333dec1949d" (UID: "cd53669d-e52f-4802-993c-5333dec1949d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.567025 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd53669d-e52f-4802-993c-5333dec1949d" (UID: "cd53669d-e52f-4802-993c-5333dec1949d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.634449 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.634485 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd53669d-e52f-4802-993c-5333dec1949d-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.634494 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hxp64\" (UniqueName: \"kubernetes.io/projected/cd53669d-e52f-4802-993c-5333dec1949d-kube-api-access-hxp64\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.748586 4690 generic.go:334] "Generic (PLEG): container finished" podID="cd53669d-e52f-4802-993c-5333dec1949d" containerID="33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945" exitCode=0 Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.748975 4690 generic.go:334] "Generic (PLEG): container finished" podID="cd53669d-e52f-4802-993c-5333dec1949d" containerID="8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38" exitCode=143 Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.748739 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.748795 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd53669d-e52f-4802-993c-5333dec1949d","Type":"ContainerDied","Data":"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945"} Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.751938 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd53669d-e52f-4802-993c-5333dec1949d","Type":"ContainerDied","Data":"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38"} Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.751963 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"cd53669d-e52f-4802-993c-5333dec1949d","Type":"ContainerDied","Data":"92f18aa674afbefaf3052418831d8971004e92beb4c8bc068a63c6beb2126751"} Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.751982 4690 scope.go:117] "RemoveContainer" containerID="33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.757339 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-pr965" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.758917 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-pr965" event={"ID":"f50d733d-5439-49fc-af1b-bb36c5b3c739","Type":"ContainerDied","Data":"970057bbee861539270bee85d36d36bedb5b6581443325f2427d7e02b9f1cee2"} Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.759011 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="970057bbee861539270bee85d36d36bedb5b6581443325f2427d7e02b9f1cee2" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.760727 4690 generic.go:334] "Generic (PLEG): container finished" podID="35385d42-f164-4605-8a55-290d5acc5192" containerID="9aeb5088c565fdb27201256af0e2df1079d1ca094ff0a39688814c0f027a9bac" exitCode=0 Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.760798 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" event={"ID":"35385d42-f164-4605-8a55-290d5acc5192","Type":"ContainerDied","Data":"9aeb5088c565fdb27201256af0e2df1079d1ca094ff0a39688814c0f027a9bac"} Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.779718 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Mar 20 13:45:23 crc kubenswrapper[4690]: E0320 13:45:23.780100 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd53669d-e52f-4802-993c-5333dec1949d" containerName="nova-api-api" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.780119 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd53669d-e52f-4802-993c-5333dec1949d" containerName="nova-api-api" Mar 20 13:45:23 crc kubenswrapper[4690]: E0320 13:45:23.780134 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd53669d-e52f-4802-993c-5333dec1949d" containerName="nova-api-log" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.780140 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd53669d-e52f-4802-993c-5333dec1949d" containerName="nova-api-log" Mar 20 13:45:23 crc kubenswrapper[4690]: E0320 13:45:23.780162 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f50d733d-5439-49fc-af1b-bb36c5b3c739" containerName="nova-cell1-conductor-db-sync" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.780169 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="f50d733d-5439-49fc-af1b-bb36c5b3c739" containerName="nova-cell1-conductor-db-sync" Mar 20 13:45:23 crc kubenswrapper[4690]: E0320 13:45:23.780185 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69168ed4-2cdf-4be8-8ae0-917d89a54670" containerName="nova-manage" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.780192 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="69168ed4-2cdf-4be8-8ae0-917d89a54670" containerName="nova-manage" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.780454 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="69168ed4-2cdf-4be8-8ae0-917d89a54670" containerName="nova-manage" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.780472 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd53669d-e52f-4802-993c-5333dec1949d" containerName="nova-api-api" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.780485 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd53669d-e52f-4802-993c-5333dec1949d" containerName="nova-api-log" Mar 20 13:45:23 crc 
kubenswrapper[4690]: I0320 13:45:23.780506 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="f50d733d-5439-49fc-af1b-bb36c5b3c739" containerName="nova-cell1-conductor-db-sync" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.781110 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.788605 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.802384 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.817522 4690 scope.go:117] "RemoveContainer" containerID="8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.825817 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.843253 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ldq8\" (UniqueName: \"kubernetes.io/projected/37380d51-16da-4dc4-a30e-e0759035a9f4-kube-api-access-5ldq8\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.843460 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37380d51-16da-4dc4-a30e-e0759035a9f4-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.843595 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37380d51-16da-4dc4-a30e-e0759035a9f4-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.847765 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.866626 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.879120 4690 scope.go:117] "RemoveContainer" containerID="33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945" Mar 20 13:45:23 crc kubenswrapper[4690]: E0320 13:45:23.886460 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945\": container with ID starting with 33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945 not found: ID does not exist" containerID="33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.886514 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945"} err="failed to get container status \"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945\": rpc error: code = NotFound 
desc = could not find container \"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945\": container with ID starting with 33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945 not found: ID does not exist" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.886544 4690 scope.go:117] "RemoveContainer" containerID="8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38" Mar 20 13:45:23 crc kubenswrapper[4690]: E0320 13:45:23.887028 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38\": container with ID starting with 8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38 not found: ID does not exist" containerID="8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.887171 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38"} err="failed to get container status \"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38\": rpc error: code = NotFound desc = could not find container \"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38\": container with ID starting with 8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38 not found: ID does not exist" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.887255 4690 scope.go:117] "RemoveContainer" containerID="33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.887561 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945"} err="failed to get container status \"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945\": rpc error: code = NotFound desc = could not find container \"33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945\": container with ID starting with 33e9485a505ffbd5772ae712a196b93688dac46d518ca75778203ec72eecf945 not found: ID does not exist" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.887657 4690 scope.go:117] "RemoveContainer" containerID="8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.893070 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38"} err="failed to get container status \"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38\": rpc error: code = NotFound desc = could not find container \"8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38\": container with ID starting with 8dd310afff9ac43f3e7170074a1b83a89fb560249c3bba06f443030cb0a8ad38 not found: ID does not exist" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.894306 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.904940 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:23 crc kubenswrapper[4690]: E0320 13:45:23.905481 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35385d42-f164-4605-8a55-290d5acc5192" containerName="dnsmasq-dns" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.905740 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="35385d42-f164-4605-8a55-290d5acc5192" containerName="dnsmasq-dns" Mar 20 13:45:23 crc kubenswrapper[4690]: E0320 13:45:23.905861 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35385d42-f164-4605-8a55-290d5acc5192" containerName="init" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.905928 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="35385d42-f164-4605-8a55-290d5acc5192" containerName="init" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.906147 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="35385d42-f164-4605-8a55-290d5acc5192" containerName="dnsmasq-dns" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.907150 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.909485 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.936970 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.946054 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ldq8\" (UniqueName: \"kubernetes.io/projected/37380d51-16da-4dc4-a30e-e0759035a9f4-kube-api-access-5ldq8\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.948069 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37380d51-16da-4dc4-a30e-e0759035a9f4-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.948928 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37380d51-16da-4dc4-a30e-e0759035a9f4-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.954721 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37380d51-16da-4dc4-a30e-e0759035a9f4-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc kubenswrapper[4690]: I0320 13:45:23.955086 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37380d51-16da-4dc4-a30e-e0759035a9f4-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:23 crc 
kubenswrapper[4690]: I0320 13:45:23.964273 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ldq8\" (UniqueName: \"kubernetes.io/projected/37380d51-16da-4dc4-a30e-e0759035a9f4-kube-api-access-5ldq8\") pod \"nova-cell1-conductor-0\" (UID: \"37380d51-16da-4dc4-a30e-e0759035a9f4\") " pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.051012 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-svc\") pod \"35385d42-f164-4605-8a55-290d5acc5192\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.051277 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-swift-storage-0\") pod \"35385d42-f164-4605-8a55-290d5acc5192\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.051367 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-nb\") pod \"35385d42-f164-4605-8a55-290d5acc5192\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.051531 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44g7f\" (UniqueName: \"kubernetes.io/projected/35385d42-f164-4605-8a55-290d5acc5192-kube-api-access-44g7f\") pod \"35385d42-f164-4605-8a55-290d5acc5192\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.051627 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-sb\") pod \"35385d42-f164-4605-8a55-290d5acc5192\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.051790 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-config\") pod \"35385d42-f164-4605-8a55-290d5acc5192\" (UID: \"35385d42-f164-4605-8a55-290d5acc5192\") " Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.052070 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc173b57-1e6f-43a4-a292-a6c6eeb56025-logs\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.052274 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-config-data\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.052474 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rrlqg\" (UniqueName: \"kubernetes.io/projected/fc173b57-1e6f-43a4-a292-a6c6eeb56025-kube-api-access-rrlqg\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " 
pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.052614 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.069932 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35385d42-f164-4605-8a55-290d5acc5192-kube-api-access-44g7f" (OuterVolumeSpecName: "kube-api-access-44g7f") pod "35385d42-f164-4605-8a55-290d5acc5192" (UID: "35385d42-f164-4605-8a55-290d5acc5192"). InnerVolumeSpecName "kube-api-access-44g7f". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.105964 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.110009 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "35385d42-f164-4605-8a55-290d5acc5192" (UID: "35385d42-f164-4605-8a55-290d5acc5192"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.121211 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "35385d42-f164-4605-8a55-290d5acc5192" (UID: "35385d42-f164-4605-8a55-290d5acc5192"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.121670 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "35385d42-f164-4605-8a55-290d5acc5192" (UID: "35385d42-f164-4605-8a55-290d5acc5192"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.124032 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "35385d42-f164-4605-8a55-290d5acc5192" (UID: "35385d42-f164-4605-8a55-290d5acc5192"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.137593 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-config" (OuterVolumeSpecName: "config") pod "35385d42-f164-4605-8a55-290d5acc5192" (UID: "35385d42-f164-4605-8a55-290d5acc5192"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155332 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-config-data\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155428 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rrlqg\" (UniqueName: \"kubernetes.io/projected/fc173b57-1e6f-43a4-a292-a6c6eeb56025-kube-api-access-rrlqg\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155489 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155583 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc173b57-1e6f-43a4-a292-a6c6eeb56025-logs\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155713 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155727 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155736 4690 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155747 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155756 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44g7f\" (UniqueName: \"kubernetes.io/projected/35385d42-f164-4605-8a55-290d5acc5192-kube-api-access-44g7f\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.155766 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/35385d42-f164-4605-8a55-290d5acc5192-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.156255 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc173b57-1e6f-43a4-a292-a6c6eeb56025-logs\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.163602 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.170472 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-config-data\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.178569 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rrlqg\" (UniqueName: \"kubernetes.io/projected/fc173b57-1e6f-43a4-a292-a6c6eeb56025-kube-api-access-rrlqg\") pod \"nova-api-0\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.231341 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.427280 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd53669d-e52f-4802-993c-5333dec1949d" path="/var/lib/kubelet/pods/cd53669d-e52f-4802-993c-5333dec1949d/volumes" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.551822 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Mar 20 13:45:24 crc kubenswrapper[4690]: W0320 13:45:24.702565 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc173b57_1e6f_43a4_a292_a6c6eeb56025.slice/crio-025497c4115389f210586a80150d764fa2c3305362fc961d1c04563c6f9d7ca2 WatchSource:0}: Error finding container 025497c4115389f210586a80150d764fa2c3305362fc961d1c04563c6f9d7ca2: Status 404 returned error can't find the container with id 025497c4115389f210586a80150d764fa2c3305362fc961d1c04563c6f9d7ca2 Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.719477 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.778892 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"37380d51-16da-4dc4-a30e-e0759035a9f4","Type":"ContainerStarted","Data":"c04b179b48cc9441dc47646d83f5774e3387aff6dda22ae80d7ac51200f997e5"} Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.778983 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"37380d51-16da-4dc4-a30e-e0759035a9f4","Type":"ContainerStarted","Data":"3705b33de50d267ad0c76991b21a0b8f42b113ae29b0de045065334fd2a86d0e"} Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.779098 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.780317 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fc173b57-1e6f-43a4-a292-a6c6eeb56025","Type":"ContainerStarted","Data":"025497c4115389f210586a80150d764fa2c3305362fc961d1c04563c6f9d7ca2"} Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.787609 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" event={"ID":"35385d42-f164-4605-8a55-290d5acc5192","Type":"ContainerDied","Data":"5a5cb0f1722eec25638c791eb4fc843b1f74a82eb8b4d8de328dac0256b4631b"} Mar 20 
13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.787674 4690 scope.go:117] "RemoveContainer" containerID="9aeb5088c565fdb27201256af0e2df1079d1ca094ff0a39688814c0f027a9bac" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.787892 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6578955fd5-n9p8r" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.803443 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=1.803427005 podStartE2EDuration="1.803427005s" podCreationTimestamp="2026-03-20 13:45:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:24.799675418 +0000 UTC m=+1371.089275401" watchObservedRunningTime="2026-03-20 13:45:24.803427005 +0000 UTC m=+1371.093026948" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.821615 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-n9p8r"] Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.824525 4690 scope.go:117] "RemoveContainer" containerID="bc0b536e1572bb5bbc6352489068799b6720a5d92ef13bec818de0c2f4aa5b81" Mar 20 13:45:24 crc kubenswrapper[4690]: I0320 13:45:24.835494 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6578955fd5-n9p8r"] Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.784884 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.799367 4690 generic.go:334] "Generic (PLEG): container finished" podID="5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" containerID="7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8" exitCode=0 Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.799496 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097","Type":"ContainerDied","Data":"7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8"} Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.799539 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097","Type":"ContainerDied","Data":"106a162122e6b4647fb66a44a827909992fa00aecf621f379c1808bb2d775ab1"} Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.799565 4690 scope.go:117] "RemoveContainer" containerID="7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8" Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.799719 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.811549 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fc173b57-1e6f-43a4-a292-a6c6eeb56025","Type":"ContainerStarted","Data":"b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c"} Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.811602 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fc173b57-1e6f-43a4-a292-a6c6eeb56025","Type":"ContainerStarted","Data":"dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651"} Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.824138 4690 scope.go:117] "RemoveContainer" containerID="7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8" Mar 20 13:45:25 crc kubenswrapper[4690]: E0320 13:45:25.824721 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8\": container with ID starting with 7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8 not found: ID does not exist" containerID="7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8" Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.824784 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8"} err="failed to get container status \"7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8\": rpc error: code = NotFound desc = could not find container \"7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8\": container with ID starting with 7b883b44db1008b57bd08b180f57b9e55517cba1831bb21cde03a37d224764c8 not found: ID does not exist" Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.849266 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.849245664 podStartE2EDuration="2.849245664s" podCreationTimestamp="2026-03-20 13:45:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:25.832465726 +0000 UTC m=+1372.122065669" watchObservedRunningTime="2026-03-20 13:45:25.849245664 +0000 UTC m=+1372.138845607" Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.890384 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-combined-ca-bundle\") pod \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.890576 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-config-data\") pod \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.890608 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg22p\" (UniqueName: \"kubernetes.io/projected/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-kube-api-access-xg22p\") pod \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\" (UID: \"5c9ded12-3b6b-4aa1-9320-5bd58c7c8097\") " Mar 20 13:45:25 crc kubenswrapper[4690]: 
I0320 13:45:25.895830 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-kube-api-access-xg22p" (OuterVolumeSpecName: "kube-api-access-xg22p") pod "5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" (UID: "5c9ded12-3b6b-4aa1-9320-5bd58c7c8097"). InnerVolumeSpecName "kube-api-access-xg22p". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.920830 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" (UID: "5c9ded12-3b6b-4aa1-9320-5bd58c7c8097"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:25 crc kubenswrapper[4690]: I0320 13:45:25.924590 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-config-data" (OuterVolumeSpecName: "config-data") pod "5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" (UID: "5c9ded12-3b6b-4aa1-9320-5bd58c7c8097"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.003371 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg22p\" (UniqueName: \"kubernetes.io/projected/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-kube-api-access-xg22p\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.003406 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.003421 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.137999 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.144793 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.159430 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:26 crc kubenswrapper[4690]: E0320 13:45:26.159928 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" containerName="nova-scheduler-scheduler" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.159954 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" containerName="nova-scheduler-scheduler" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.160235 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" containerName="nova-scheduler-scheduler" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.161015 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.164669 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.181956 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.310407 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.310489 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-config-data\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.310575 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvjl8\" (UniqueName: \"kubernetes.io/projected/729f77d5-e919-412b-9c15-eeaf2e56f00b-kube-api-access-zvjl8\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.412355 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-config-data\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.412486 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvjl8\" (UniqueName: \"kubernetes.io/projected/729f77d5-e919-412b-9c15-eeaf2e56f00b-kube-api-access-zvjl8\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.412621 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.417043 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-config-data\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.417101 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.428288 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35385d42-f164-4605-8a55-290d5acc5192" 
path="/var/lib/kubelet/pods/35385d42-f164-4605-8a55-290d5acc5192/volumes" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.429552 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c9ded12-3b6b-4aa1-9320-5bd58c7c8097" path="/var/lib/kubelet/pods/5c9ded12-3b6b-4aa1-9320-5bd58c7c8097/volumes" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.443744 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvjl8\" (UniqueName: \"kubernetes.io/projected/729f77d5-e919-412b-9c15-eeaf2e56f00b-kube-api-access-zvjl8\") pod \"nova-scheduler-0\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.477187 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:45:26 crc kubenswrapper[4690]: I0320 13:45:26.967944 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:45:26 crc kubenswrapper[4690]: W0320 13:45:26.973421 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod729f77d5_e919_412b_9c15_eeaf2e56f00b.slice/crio-f5448d554b2189fbe00785a5535b40883ee8f52d971b3b1a02f7d19e992cc068 WatchSource:0}: Error finding container f5448d554b2189fbe00785a5535b40883ee8f52d971b3b1a02f7d19e992cc068: Status 404 returned error can't find the container with id f5448d554b2189fbe00785a5535b40883ee8f52d971b3b1a02f7d19e992cc068 Mar 20 13:45:27 crc kubenswrapper[4690]: I0320 13:45:27.555252 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:45:27 crc kubenswrapper[4690]: I0320 13:45:27.555747 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="d3e883d8-973f-4e69-a13f-175f1904a203" containerName="kube-state-metrics" containerID="cri-o://51297d0654a5eeb4092b6a63f2abd1a703efedb7d756791f44adfa9e0b6f7860" gracePeriod=30 Mar 20 13:45:27 crc kubenswrapper[4690]: I0320 13:45:27.848458 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"729f77d5-e919-412b-9c15-eeaf2e56f00b","Type":"ContainerStarted","Data":"988d7dfb94537d70fbdc98c81bad6806f1c68166f6214a0ed841394acf0ceb5e"} Mar 20 13:45:27 crc kubenswrapper[4690]: I0320 13:45:27.848511 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"729f77d5-e919-412b-9c15-eeaf2e56f00b","Type":"ContainerStarted","Data":"f5448d554b2189fbe00785a5535b40883ee8f52d971b3b1a02f7d19e992cc068"} Mar 20 13:45:27 crc kubenswrapper[4690]: I0320 13:45:27.850356 4690 generic.go:334] "Generic (PLEG): container finished" podID="d3e883d8-973f-4e69-a13f-175f1904a203" containerID="51297d0654a5eeb4092b6a63f2abd1a703efedb7d756791f44adfa9e0b6f7860" exitCode=2 Mar 20 13:45:27 crc kubenswrapper[4690]: I0320 13:45:27.850512 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d3e883d8-973f-4e69-a13f-175f1904a203","Type":"ContainerDied","Data":"51297d0654a5eeb4092b6a63f2abd1a703efedb7d756791f44adfa9e0b6f7860"} Mar 20 13:45:27 crc kubenswrapper[4690]: I0320 13:45:27.912327 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=1.912306668 podStartE2EDuration="1.912306668s" podCreationTimestamp="2026-03-20 13:45:26 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:27.879622566 +0000 UTC m=+1374.169222509" watchObservedRunningTime="2026-03-20 13:45:27.912306668 +0000 UTC m=+1374.201906601" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.089677 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.244510 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnn4t\" (UniqueName: \"kubernetes.io/projected/d3e883d8-973f-4e69-a13f-175f1904a203-kube-api-access-nnn4t\") pod \"d3e883d8-973f-4e69-a13f-175f1904a203\" (UID: \"d3e883d8-973f-4e69-a13f-175f1904a203\") " Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.249900 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3e883d8-973f-4e69-a13f-175f1904a203-kube-api-access-nnn4t" (OuterVolumeSpecName: "kube-api-access-nnn4t") pod "d3e883d8-973f-4e69-a13f-175f1904a203" (UID: "d3e883d8-973f-4e69-a13f-175f1904a203"). InnerVolumeSpecName "kube-api-access-nnn4t". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.346560 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnn4t\" (UniqueName: \"kubernetes.io/projected/d3e883d8-973f-4e69-a13f-175f1904a203-kube-api-access-nnn4t\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.881873 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.881864 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d3e883d8-973f-4e69-a13f-175f1904a203","Type":"ContainerDied","Data":"1040739cf1913d44c6e570cdcf88c4e5e759975150e4576d1c8fa8883b7748fe"} Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.882079 4690 scope.go:117] "RemoveContainer" containerID="51297d0654a5eeb4092b6a63f2abd1a703efedb7d756791f44adfa9e0b6f7860" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.921147 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.936171 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.960596 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:45:28 crc kubenswrapper[4690]: E0320 13:45:28.961734 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3e883d8-973f-4e69-a13f-175f1904a203" containerName="kube-state-metrics" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.961768 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3e883d8-973f-4e69-a13f-175f1904a203" containerName="kube-state-metrics" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.962520 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3e883d8-973f-4e69-a13f-175f1904a203" containerName="kube-state-metrics" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.963698 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.967366 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.967816 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Mar 20 13:45:28 crc kubenswrapper[4690]: I0320 13:45:28.998057 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.073125 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.073220 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.073654 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.073728 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vjrp\" (UniqueName: \"kubernetes.io/projected/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-api-access-9vjrp\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.155519 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.182910 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vjrp\" (UniqueName: \"kubernetes.io/projected/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-api-access-9vjrp\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.183319 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.185627 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " 
pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.185810 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.191022 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.196412 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.196496 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c60cea38-9cd8-4c38-94d4-3eefa840b455-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.203428 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vjrp\" (UniqueName: \"kubernetes.io/projected/c60cea38-9cd8-4c38-94d4-3eefa840b455-kube-api-access-9vjrp\") pod \"kube-state-metrics-0\" (UID: \"c60cea38-9cd8-4c38-94d4-3eefa840b455\") " pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.294836 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.440696 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.441375 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="ceilometer-central-agent" containerID="cri-o://b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13" gracePeriod=30 Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.441522 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="proxy-httpd" containerID="cri-o://6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53" gracePeriod=30 Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.441569 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="sg-core" containerID="cri-o://1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1" gracePeriod=30 Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.441608 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="ceilometer-notification-agent" containerID="cri-o://992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f" gracePeriod=30 Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.794348 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.906322 4690 generic.go:334] "Generic (PLEG): container finished" podID="8f339415-4513-4705-865e-458eb3e25741" containerID="6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53" exitCode=0 Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.906752 4690 generic.go:334] "Generic (PLEG): container finished" podID="8f339415-4513-4705-865e-458eb3e25741" containerID="1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1" exitCode=2 Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.906765 4690 generic.go:334] "Generic (PLEG): container finished" podID="8f339415-4513-4705-865e-458eb3e25741" containerID="b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13" exitCode=0 Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.906394 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerDied","Data":"6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53"} Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.906860 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerDied","Data":"1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1"} Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.906882 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerDied","Data":"b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13"} Mar 20 13:45:29 crc kubenswrapper[4690]: I0320 13:45:29.908369 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/kube-state-metrics-0" event={"ID":"c60cea38-9cd8-4c38-94d4-3eefa840b455","Type":"ContainerStarted","Data":"4105c4d94bb71543bac17e206402f15f711f4fd117d5db8d37dbff3b9e146679"} Mar 20 13:45:30 crc kubenswrapper[4690]: I0320 13:45:30.430469 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3e883d8-973f-4e69-a13f-175f1904a203" path="/var/lib/kubelet/pods/d3e883d8-973f-4e69-a13f-175f1904a203/volumes" Mar 20 13:45:30 crc kubenswrapper[4690]: I0320 13:45:30.923645 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"c60cea38-9cd8-4c38-94d4-3eefa840b455","Type":"ContainerStarted","Data":"c362bf28a42cfbdca2278562e1fb09ca04fab1dfeb88f585c523dad22ec8906b"} Mar 20 13:45:30 crc kubenswrapper[4690]: I0320 13:45:30.923940 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Mar 20 13:45:30 crc kubenswrapper[4690]: I0320 13:45:30.957195 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.591009161 podStartE2EDuration="2.957172726s" podCreationTimestamp="2026-03-20 13:45:28 +0000 UTC" firstStartedPulling="2026-03-20 13:45:29.812936119 +0000 UTC m=+1376.102536082" lastFinishedPulling="2026-03-20 13:45:30.179099704 +0000 UTC m=+1376.468699647" observedRunningTime="2026-03-20 13:45:30.943776254 +0000 UTC m=+1377.233376207" watchObservedRunningTime="2026-03-20 13:45:30.957172726 +0000 UTC m=+1377.246772679" Mar 20 13:45:31 crc kubenswrapper[4690]: I0320 13:45:31.209126 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Mar 20 13:45:31 crc kubenswrapper[4690]: I0320 13:45:31.209179 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Mar 20 13:45:31 crc kubenswrapper[4690]: I0320 13:45:31.478028 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.519777 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.567487 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-log-httpd\") pod \"8f339415-4513-4705-865e-458eb3e25741\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.567600 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l48c8\" (UniqueName: \"kubernetes.io/projected/8f339415-4513-4705-865e-458eb3e25741-kube-api-access-l48c8\") pod \"8f339415-4513-4705-865e-458eb3e25741\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.567646 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-config-data\") pod \"8f339415-4513-4705-865e-458eb3e25741\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.567788 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-run-httpd\") pod \"8f339415-4513-4705-865e-458eb3e25741\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.567903 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-combined-ca-bundle\") pod \"8f339415-4513-4705-865e-458eb3e25741\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.567948 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-sg-core-conf-yaml\") pod \"8f339415-4513-4705-865e-458eb3e25741\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.568036 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-scripts\") pod \"8f339415-4513-4705-865e-458eb3e25741\" (UID: \"8f339415-4513-4705-865e-458eb3e25741\") " Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.573213 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8f339415-4513-4705-865e-458eb3e25741" (UID: "8f339415-4513-4705-865e-458eb3e25741"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.573619 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8f339415-4513-4705-865e-458eb3e25741" (UID: "8f339415-4513-4705-865e-458eb3e25741"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.576541 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f339415-4513-4705-865e-458eb3e25741-kube-api-access-l48c8" (OuterVolumeSpecName: "kube-api-access-l48c8") pod "8f339415-4513-4705-865e-458eb3e25741" (UID: "8f339415-4513-4705-865e-458eb3e25741"). InnerVolumeSpecName "kube-api-access-l48c8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.577194 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-scripts" (OuterVolumeSpecName: "scripts") pod "8f339415-4513-4705-865e-458eb3e25741" (UID: "8f339415-4513-4705-865e-458eb3e25741"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.608156 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8f339415-4513-4705-865e-458eb3e25741" (UID: "8f339415-4513-4705-865e-458eb3e25741"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.670346 4690 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.670375 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.670385 4690 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.670395 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l48c8\" (UniqueName: \"kubernetes.io/projected/8f339415-4513-4705-865e-458eb3e25741-kube-api-access-l48c8\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.670406 4690 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8f339415-4513-4705-865e-458eb3e25741-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.681095 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8f339415-4513-4705-865e-458eb3e25741" (UID: "8f339415-4513-4705-865e-458eb3e25741"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.697420 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-config-data" (OuterVolumeSpecName: "config-data") pod "8f339415-4513-4705-865e-458eb3e25741" (UID: "8f339415-4513-4705-865e-458eb3e25741"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.771197 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.771228 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8f339415-4513-4705-865e-458eb3e25741-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.965779 4690 generic.go:334] "Generic (PLEG): container finished" podID="8f339415-4513-4705-865e-458eb3e25741" containerID="992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f" exitCode=0 Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.965891 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerDied","Data":"992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f"} Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.965945 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8f339415-4513-4705-865e-458eb3e25741","Type":"ContainerDied","Data":"eef3630ef08b68fec4163e4bdfa73bd4baacd33e6b2c329f9e7325a4d301697a"} Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.965984 4690 scope.go:117] "RemoveContainer" containerID="6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53" Mar 20 13:45:33 crc kubenswrapper[4690]: I0320 13:45:33.966255 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.014579 4690 scope.go:117] "RemoveContainer" containerID="1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.030889 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.036829 4690 scope.go:117] "RemoveContainer" containerID="992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.043491 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.054711 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:34 crc kubenswrapper[4690]: E0320 13:45:34.055122 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="ceilometer-central-agent" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.055139 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="ceilometer-central-agent" Mar 20 13:45:34 crc kubenswrapper[4690]: E0320 13:45:34.055153 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="proxy-httpd" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.055159 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="proxy-httpd" Mar 20 13:45:34 crc kubenswrapper[4690]: E0320 13:45:34.055186 4690 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="sg-core" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.055193 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="sg-core" Mar 20 13:45:34 crc kubenswrapper[4690]: E0320 13:45:34.055207 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="ceilometer-notification-agent" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.055213 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="ceilometer-notification-agent" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.055366 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="ceilometer-notification-agent" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.055377 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="ceilometer-central-agent" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.055385 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="sg-core" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.055399 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f339415-4513-4705-865e-458eb3e25741" containerName="proxy-httpd" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.056971 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.059331 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.059509 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.059682 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.061757 4690 scope.go:117] "RemoveContainer" containerID="b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.068442 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.084922 4690 scope.go:117] "RemoveContainer" containerID="6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53" Mar 20 13:45:34 crc kubenswrapper[4690]: E0320 13:45:34.085446 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53\": container with ID starting with 6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53 not found: ID does not exist" containerID="6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.085481 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53"} err="failed to get container status \"6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53\": rpc error: code = NotFound desc = could not find container 
\"6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53\": container with ID starting with 6a8877c296dc64f2f802924c2bc654df0337c69eeec40cc75c0bdceff060fa53 not found: ID does not exist" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.085506 4690 scope.go:117] "RemoveContainer" containerID="1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1" Mar 20 13:45:34 crc kubenswrapper[4690]: E0320 13:45:34.085830 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1\": container with ID starting with 1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1 not found: ID does not exist" containerID="1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.085885 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1"} err="failed to get container status \"1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1\": rpc error: code = NotFound desc = could not find container \"1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1\": container with ID starting with 1613b02a4df38ed729ae0ac5935d85e6840615e3b1eba4d41c51008b54a2d0d1 not found: ID does not exist" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.085911 4690 scope.go:117] "RemoveContainer" containerID="992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f" Mar 20 13:45:34 crc kubenswrapper[4690]: E0320 13:45:34.086173 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f\": container with ID starting with 992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f not found: ID does not exist" containerID="992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.086201 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f"} err="failed to get container status \"992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f\": rpc error: code = NotFound desc = could not find container \"992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f\": container with ID starting with 992c0db4aa510d630068f1b332f18f32f0fa2369db5734f01c3e7a68ecdb038f not found: ID does not exist" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.086218 4690 scope.go:117] "RemoveContainer" containerID="b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13" Mar 20 13:45:34 crc kubenswrapper[4690]: E0320 13:45:34.086456 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13\": container with ID starting with b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13 not found: ID does not exist" containerID="b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.086481 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13"} 
err="failed to get container status \"b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13\": rpc error: code = NotFound desc = could not find container \"b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13\": container with ID starting with b1cded04cc28817edd0b5e3029de2752eb5086dcae851dbafce1029ab93eff13 not found: ID does not exist" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.177981 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-run-httpd\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.178127 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.178157 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-scripts\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.178230 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.178258 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9f4j\" (UniqueName: \"kubernetes.io/projected/a64189b3-3670-4924-ab49-495e4e8723dc-kube-api-access-z9f4j\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.178333 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-log-httpd\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.178388 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-config-data\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.178471 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.232957 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Mar 20 13:45:34 crc 
kubenswrapper[4690]: I0320 13:45:34.232996 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.281174 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-run-httpd\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.281251 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.281294 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-scripts\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.281334 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.281365 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9f4j\" (UniqueName: \"kubernetes.io/projected/a64189b3-3670-4924-ab49-495e4e8723dc-kube-api-access-z9f4j\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.281910 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-run-httpd\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.282330 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-log-httpd\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.282832 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-log-httpd\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.283134 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-config-data\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.283327 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.287532 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-scripts\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.289425 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.291768 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-config-data\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.296828 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.299543 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.310087 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9f4j\" (UniqueName: \"kubernetes.io/projected/a64189b3-3670-4924-ab49-495e4e8723dc-kube-api-access-z9f4j\") pod \"ceilometer-0\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.386178 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.432177 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f339415-4513-4705-865e-458eb3e25741" path="/var/lib/kubelet/pods/8f339415-4513-4705-865e-458eb3e25741/volumes" Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.911450 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:34 crc kubenswrapper[4690]: I0320 13:45:34.980601 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerStarted","Data":"9f7838ec13a7f9401519f68ab5f26d6e31448294fb38454af3116e588c23953c"} Mar 20 13:45:35 crc kubenswrapper[4690]: I0320 13:45:35.314996 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Mar 20 13:45:35 crc kubenswrapper[4690]: I0320 13:45:35.315013 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.203:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Mar 20 13:45:35 crc kubenswrapper[4690]: I0320 13:45:35.992424 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerStarted","Data":"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8"} Mar 20 13:45:36 crc kubenswrapper[4690]: I0320 13:45:36.478186 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Mar 20 13:45:36 crc kubenswrapper[4690]: I0320 13:45:36.506333 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Mar 20 13:45:37 crc kubenswrapper[4690]: I0320 13:45:37.024578 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerStarted","Data":"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a"} Mar 20 13:45:37 crc kubenswrapper[4690]: I0320 13:45:37.059519 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Mar 20 13:45:38 crc kubenswrapper[4690]: I0320 13:45:38.037795 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerStarted","Data":"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39"} Mar 20 13:45:39 crc kubenswrapper[4690]: I0320 13:45:39.315153 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Mar 20 13:45:40 crc kubenswrapper[4690]: I0320 13:45:40.057238 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerStarted","Data":"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4"} Mar 20 13:45:40 crc kubenswrapper[4690]: I0320 13:45:40.057763 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:45:40 crc 
kubenswrapper[4690]: I0320 13:45:40.085761 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.38534973 podStartE2EDuration="6.085722468s" podCreationTimestamp="2026-03-20 13:45:34 +0000 UTC" firstStartedPulling="2026-03-20 13:45:34.91904478 +0000 UTC m=+1381.208644753" lastFinishedPulling="2026-03-20 13:45:39.619417548 +0000 UTC m=+1385.909017491" observedRunningTime="2026-03-20 13:45:40.076547116 +0000 UTC m=+1386.366147069" watchObservedRunningTime="2026-03-20 13:45:40.085722468 +0000 UTC m=+1386.375322411" Mar 20 13:45:42 crc kubenswrapper[4690]: I0320 13:45:42.232781 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Mar 20 13:45:42 crc kubenswrapper[4690]: I0320 13:45:42.233331 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Mar 20 13:45:44 crc kubenswrapper[4690]: I0320 13:45:44.237980 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Mar 20 13:45:44 crc kubenswrapper[4690]: I0320 13:45:44.238404 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Mar 20 13:45:44 crc kubenswrapper[4690]: I0320 13:45:44.244252 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.118534 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.351820 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-qk7qx"] Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.353718 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.369816 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-qk7qx"] Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.417825 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.418033 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.418067 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-config\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.418103 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gvvl\" (UniqueName: \"kubernetes.io/projected/91d61f03-56dd-4e92-a723-2cf8f6f018ca-kube-api-access-2gvvl\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.418265 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.418384 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.520231 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.520284 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-config\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.520319 4690 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-2gvvl\" (UniqueName: \"kubernetes.io/projected/91d61f03-56dd-4e92-a723-2cf8f6f018ca-kube-api-access-2gvvl\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.520390 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.521314 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.521394 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.522081 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-dns-swift-storage-0\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.522111 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-config\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.522122 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-dns-svc\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.522562 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-ovsdbserver-sb\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.522767 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/91d61f03-56dd-4e92-a723-2cf8f6f018ca-ovsdbserver-nb\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.553440 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2gvvl\" (UniqueName: 
\"kubernetes.io/projected/91d61f03-56dd-4e92-a723-2cf8f6f018ca-kube-api-access-2gvvl\") pod \"dnsmasq-dns-cd5cbd7b9-qk7qx\" (UID: \"91d61f03-56dd-4e92-a723-2cf8f6f018ca\") " pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:45 crc kubenswrapper[4690]: I0320 13:45:45.680246 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:46 crc kubenswrapper[4690]: I0320 13:45:46.198548 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-cd5cbd7b9-qk7qx"] Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.132002 4690 generic.go:334] "Generic (PLEG): container finished" podID="91d61f03-56dd-4e92-a723-2cf8f6f018ca" containerID="dc83b1ec21ee58f2176f685d3861a29f1d628f1b88ee49f4e1cfa6fb6ac44a96" exitCode=0 Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.132219 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" event={"ID":"91d61f03-56dd-4e92-a723-2cf8f6f018ca","Type":"ContainerDied","Data":"dc83b1ec21ee58f2176f685d3861a29f1d628f1b88ee49f4e1cfa6fb6ac44a96"} Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.132430 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" event={"ID":"91d61f03-56dd-4e92-a723-2cf8f6f018ca","Type":"ContainerStarted","Data":"4487b7962c8b72c4af2ca1aeda0e289bd2bfa8e4ab4ea1da323581af2955fac5"} Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.215428 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.215814 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="ceilometer-central-agent" containerID="cri-o://f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8" gracePeriod=30 Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.216600 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="proxy-httpd" containerID="cri-o://86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4" gracePeriod=30 Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.216689 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="sg-core" containerID="cri-o://f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39" gracePeriod=30 Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.216733 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="ceilometer-notification-agent" containerID="cri-o://bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a" gracePeriod=30 Mar 20 13:45:47 crc kubenswrapper[4690]: I0320 13:45:47.520357 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.033485 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.076038 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-config-data\") pod \"a64189b3-3670-4924-ab49-495e4e8723dc\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.076200 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-combined-ca-bundle\") pod \"a64189b3-3670-4924-ab49-495e4e8723dc\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.076246 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-ceilometer-tls-certs\") pod \"a64189b3-3670-4924-ab49-495e4e8723dc\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.076291 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9f4j\" (UniqueName: \"kubernetes.io/projected/a64189b3-3670-4924-ab49-495e4e8723dc-kube-api-access-z9f4j\") pod \"a64189b3-3670-4924-ab49-495e4e8723dc\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.076383 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-sg-core-conf-yaml\") pod \"a64189b3-3670-4924-ab49-495e4e8723dc\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.076429 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-log-httpd\") pod \"a64189b3-3670-4924-ab49-495e4e8723dc\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.076522 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-run-httpd\") pod \"a64189b3-3670-4924-ab49-495e4e8723dc\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.076575 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-scripts\") pod \"a64189b3-3670-4924-ab49-495e4e8723dc\" (UID: \"a64189b3-3670-4924-ab49-495e4e8723dc\") " Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.077215 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a64189b3-3670-4924-ab49-495e4e8723dc" (UID: "a64189b3-3670-4924-ab49-495e4e8723dc"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.077342 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a64189b3-3670-4924-ab49-495e4e8723dc" (UID: "a64189b3-3670-4924-ab49-495e4e8723dc"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.084961 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-scripts" (OuterVolumeSpecName: "scripts") pod "a64189b3-3670-4924-ab49-495e4e8723dc" (UID: "a64189b3-3670-4924-ab49-495e4e8723dc"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.089059 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a64189b3-3670-4924-ab49-495e4e8723dc-kube-api-access-z9f4j" (OuterVolumeSpecName: "kube-api-access-z9f4j") pod "a64189b3-3670-4924-ab49-495e4e8723dc" (UID: "a64189b3-3670-4924-ab49-495e4e8723dc"). InnerVolumeSpecName "kube-api-access-z9f4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.120688 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a64189b3-3670-4924-ab49-495e4e8723dc" (UID: "a64189b3-3670-4924-ab49-495e4e8723dc"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.156511 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" event={"ID":"91d61f03-56dd-4e92-a723-2cf8f6f018ca","Type":"ContainerStarted","Data":"16316e7b19f3a598d03dede3b7913d811fd97a7e4e1c892a2fce3795a3203df2"} Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.156591 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.172279 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "a64189b3-3670-4924-ab49-495e4e8723dc" (UID: "a64189b3-3670-4924-ab49-495e4e8723dc"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174012 4690 generic.go:334] "Generic (PLEG): container finished" podID="a64189b3-3670-4924-ab49-495e4e8723dc" containerID="86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4" exitCode=0 Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174047 4690 generic.go:334] "Generic (PLEG): container finished" podID="a64189b3-3670-4924-ab49-495e4e8723dc" containerID="f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39" exitCode=2 Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174059 4690 generic.go:334] "Generic (PLEG): container finished" podID="a64189b3-3670-4924-ab49-495e4e8723dc" containerID="bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a" exitCode=0 Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174069 4690 generic.go:334] "Generic (PLEG): container finished" podID="a64189b3-3670-4924-ab49-495e4e8723dc" containerID="f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8" exitCode=0 Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174079 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174169 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerDied","Data":"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4"} Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174205 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerDied","Data":"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39"} Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174219 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerDied","Data":"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a"} Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174230 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerDied","Data":"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8"} Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174241 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a64189b3-3670-4924-ab49-495e4e8723dc","Type":"ContainerDied","Data":"9f7838ec13a7f9401519f68ab5f26d6e31448294fb38454af3116e588c23953c"} Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174257 4690 scope.go:117] "RemoveContainer" containerID="86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.174879 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-log" containerID="cri-o://dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651" gracePeriod=30 Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.175217 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-api" 
containerID="cri-o://b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c" gracePeriod=30 Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.178976 4690 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.179007 4690 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.179019 4690 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a64189b3-3670-4924-ab49-495e4e8723dc-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.179030 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.179041 4690 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.179052 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9f4j\" (UniqueName: \"kubernetes.io/projected/a64189b3-3670-4924-ab49-495e4e8723dc-kube-api-access-z9f4j\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.181062 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" podStartSLOduration=3.181040228 podStartE2EDuration="3.181040228s" podCreationTimestamp="2026-03-20 13:45:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:48.174652836 +0000 UTC m=+1394.464252789" watchObservedRunningTime="2026-03-20 13:45:48.181040228 +0000 UTC m=+1394.470640171" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.201313 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a64189b3-3670-4924-ab49-495e4e8723dc" (UID: "a64189b3-3670-4924-ab49-495e4e8723dc"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.203144 4690 scope.go:117] "RemoveContainer" containerID="f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.225920 4690 scope.go:117] "RemoveContainer" containerID="bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.230237 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-config-data" (OuterVolumeSpecName: "config-data") pod "a64189b3-3670-4924-ab49-495e4e8723dc" (UID: "a64189b3-3670-4924-ab49-495e4e8723dc"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.252481 4690 scope.go:117] "RemoveContainer" containerID="f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.273294 4690 scope.go:117] "RemoveContainer" containerID="86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4" Mar 20 13:45:48 crc kubenswrapper[4690]: E0320 13:45:48.273809 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": container with ID starting with 86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4 not found: ID does not exist" containerID="86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.273936 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4"} err="failed to get container status \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": rpc error: code = NotFound desc = could not find container \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": container with ID starting with 86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.273988 4690 scope.go:117] "RemoveContainer" containerID="f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39" Mar 20 13:45:48 crc kubenswrapper[4690]: E0320 13:45:48.274527 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": container with ID starting with f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39 not found: ID does not exist" containerID="f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.274568 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39"} err="failed to get container status \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": rpc error: code = NotFound desc = could not find container \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": container with ID starting with f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.274601 4690 scope.go:117] "RemoveContainer" containerID="bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a" Mar 20 13:45:48 crc kubenswrapper[4690]: E0320 13:45:48.275127 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": container with ID starting with bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a not found: ID does not exist" containerID="bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.275162 4690 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a"} err="failed to get container status \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": rpc error: code = NotFound desc = could not find container \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": container with ID starting with bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.275182 4690 scope.go:117] "RemoveContainer" containerID="f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8" Mar 20 13:45:48 crc kubenswrapper[4690]: E0320 13:45:48.275479 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": container with ID starting with f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8 not found: ID does not exist" containerID="f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.275502 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8"} err="failed to get container status \"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": rpc error: code = NotFound desc = could not find container \"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": container with ID starting with f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.275516 4690 scope.go:117] "RemoveContainer" containerID="86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.275771 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4"} err="failed to get container status \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": rpc error: code = NotFound desc = could not find container \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": container with ID starting with 86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.275806 4690 scope.go:117] "RemoveContainer" containerID="f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.276095 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39"} err="failed to get container status \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": rpc error: code = NotFound desc = could not find container \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": container with ID starting with f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.276117 4690 scope.go:117] "RemoveContainer" containerID="bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.276358 4690 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a"} err="failed to get container status \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": rpc error: code = NotFound desc = could not find container \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": container with ID starting with bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.276393 4690 scope.go:117] "RemoveContainer" containerID="f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.276645 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8"} err="failed to get container status \"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": rpc error: code = NotFound desc = could not find container \"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": container with ID starting with f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.276665 4690 scope.go:117] "RemoveContainer" containerID="86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.276888 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4"} err="failed to get container status \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": rpc error: code = NotFound desc = could not find container \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": container with ID starting with 86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.276927 4690 scope.go:117] "RemoveContainer" containerID="f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.277231 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39"} err="failed to get container status \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": rpc error: code = NotFound desc = could not find container \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": container with ID starting with f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.277275 4690 scope.go:117] "RemoveContainer" containerID="bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.277542 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a"} err="failed to get container status \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": rpc error: code = NotFound desc = could not find container \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": container with ID starting with bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a not found: ID does not exist" Mar 
20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.277563 4690 scope.go:117] "RemoveContainer" containerID="f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.277779 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8"} err="failed to get container status \"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": rpc error: code = NotFound desc = could not find container \"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": container with ID starting with f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.277805 4690 scope.go:117] "RemoveContainer" containerID="86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.278283 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4"} err="failed to get container status \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": rpc error: code = NotFound desc = could not find container \"86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4\": container with ID starting with 86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.278320 4690 scope.go:117] "RemoveContainer" containerID="f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.278604 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39"} err="failed to get container status \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": rpc error: code = NotFound desc = could not find container \"f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39\": container with ID starting with f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.278636 4690 scope.go:117] "RemoveContainer" containerID="bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.279132 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a"} err="failed to get container status \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": rpc error: code = NotFound desc = could not find container \"bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a\": container with ID starting with bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.279158 4690 scope.go:117] "RemoveContainer" containerID="f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.279522 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8"} err="failed to get container status 
\"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": rpc error: code = NotFound desc = could not find container \"f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8\": container with ID starting with f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8 not found: ID does not exist" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.280879 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.281007 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a64189b3-3670-4924-ab49-495e4e8723dc-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.529053 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.581406 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.592644 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:48 crc kubenswrapper[4690]: E0320 13:45:48.593065 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="proxy-httpd" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.593078 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="proxy-httpd" Mar 20 13:45:48 crc kubenswrapper[4690]: E0320 13:45:48.593094 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="ceilometer-central-agent" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.593101 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="ceilometer-central-agent" Mar 20 13:45:48 crc kubenswrapper[4690]: E0320 13:45:48.593128 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="sg-core" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.593134 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="sg-core" Mar 20 13:45:48 crc kubenswrapper[4690]: E0320 13:45:48.593147 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="ceilometer-notification-agent" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.593153 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="ceilometer-notification-agent" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.593322 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="ceilometer-notification-agent" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.593339 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="ceilometer-central-agent" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.593348 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="sg-core" Mar 20 13:45:48 crc 
kubenswrapper[4690]: I0320 13:45:48.593358 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" containerName="proxy-httpd" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.594981 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.597626 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.597818 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.598149 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.602700 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:48 crc kubenswrapper[4690]: W0320 13:45:48.686284 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda64189b3_3670_4924_ab49_495e4e8723dc.slice/crio-f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8.scope WatchSource:0}: Error finding container f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8: Status 404 returned error can't find the container with id f74d3285c3cfa95ed91135858c8821b21dc7e4209beee5b1a34ec09be7830bc8 Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.691277 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-log-httpd\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.691497 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-scripts\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.691568 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-run-httpd\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.691629 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.691702 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-config-data\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.691754 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-85pwr\" (UniqueName: \"kubernetes.io/projected/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-kube-api-access-85pwr\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.691809 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.691972 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794038 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-log-httpd\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794316 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-scripts\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794341 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-run-httpd\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794368 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794389 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-config-data\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794412 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85pwr\" (UniqueName: \"kubernetes.io/projected/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-kube-api-access-85pwr\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794432 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794478 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.794605 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-log-httpd\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.795707 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-run-httpd\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.800554 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-scripts\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.807967 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-config-data\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.812837 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.814484 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85pwr\" (UniqueName: \"kubernetes.io/projected/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-kube-api-access-85pwr\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.826625 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.827012 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " pod="openstack/ceilometer-0" Mar 20 13:45:48 crc kubenswrapper[4690]: I0320 13:45:48.953509 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.079679 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.097946 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.104713 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pb7d7\" (UniqueName: \"kubernetes.io/projected/e58b84ab-69a5-4737-afda-6293d71a1daf-kube-api-access-pb7d7\") pod \"e58b84ab-69a5-4737-afda-6293d71a1daf\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.104825 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-combined-ca-bundle\") pod \"e58b84ab-69a5-4737-afda-6293d71a1daf\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.104974 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e58b84ab-69a5-4737-afda-6293d71a1daf-logs\") pod \"e58b84ab-69a5-4737-afda-6293d71a1daf\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.105004 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-config-data\") pod \"e58b84ab-69a5-4737-afda-6293d71a1daf\" (UID: \"e58b84ab-69a5-4737-afda-6293d71a1daf\") " Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.106128 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e58b84ab-69a5-4737-afda-6293d71a1daf-logs" (OuterVolumeSpecName: "logs") pod "e58b84ab-69a5-4737-afda-6293d71a1daf" (UID: "e58b84ab-69a5-4737-afda-6293d71a1daf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.113972 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e58b84ab-69a5-4737-afda-6293d71a1daf-kube-api-access-pb7d7" (OuterVolumeSpecName: "kube-api-access-pb7d7") pod "e58b84ab-69a5-4737-afda-6293d71a1daf" (UID: "e58b84ab-69a5-4737-afda-6293d71a1daf"). InnerVolumeSpecName "kube-api-access-pb7d7". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.145374 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-config-data" (OuterVolumeSpecName: "config-data") pod "e58b84ab-69a5-4737-afda-6293d71a1daf" (UID: "e58b84ab-69a5-4737-afda-6293d71a1daf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.156257 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e58b84ab-69a5-4737-afda-6293d71a1daf" (UID: "e58b84ab-69a5-4737-afda-6293d71a1daf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.184610 4690 generic.go:334] "Generic (PLEG): container finished" podID="d9011473-96a7-40eb-ab1b-5cf0758991e3" containerID="8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c" exitCode=137 Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.184676 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d9011473-96a7-40eb-ab1b-5cf0758991e3","Type":"ContainerDied","Data":"8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c"} Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.184702 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"d9011473-96a7-40eb-ab1b-5cf0758991e3","Type":"ContainerDied","Data":"53c8946a2c440e153003b853f4e7d5285c8365f5f2abe3349dcfce72d15dd67c"} Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.184721 4690 scope.go:117] "RemoveContainer" containerID="8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.185913 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.192529 4690 generic.go:334] "Generic (PLEG): container finished" podID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerID="dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651" exitCode=143 Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.192552 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fc173b57-1e6f-43a4-a292-a6c6eeb56025","Type":"ContainerDied","Data":"dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651"} Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.207226 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-config-data\") pod \"d9011473-96a7-40eb-ab1b-5cf0758991e3\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.207383 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bpdqx\" (UniqueName: \"kubernetes.io/projected/d9011473-96a7-40eb-ab1b-5cf0758991e3-kube-api-access-bpdqx\") pod \"d9011473-96a7-40eb-ab1b-5cf0758991e3\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.207404 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-combined-ca-bundle\") pod \"d9011473-96a7-40eb-ab1b-5cf0758991e3\" (UID: \"d9011473-96a7-40eb-ab1b-5cf0758991e3\") " Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.208170 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pb7d7\" (UniqueName: \"kubernetes.io/projected/e58b84ab-69a5-4737-afda-6293d71a1daf-kube-api-access-pb7d7\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.208187 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.208196 4690 
reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e58b84ab-69a5-4737-afda-6293d71a1daf-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.208206 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e58b84ab-69a5-4737-afda-6293d71a1daf-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.211464 4690 generic.go:334] "Generic (PLEG): container finished" podID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerID="07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def" exitCode=137 Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.211782 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.215440 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e58b84ab-69a5-4737-afda-6293d71a1daf","Type":"ContainerDied","Data":"07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def"} Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.215588 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"e58b84ab-69a5-4737-afda-6293d71a1daf","Type":"ContainerDied","Data":"cb97f28e871491c4dc33d7cbf172b5ed8c0c6390f852da91676edad303f2545a"} Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.217904 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9011473-96a7-40eb-ab1b-5cf0758991e3-kube-api-access-bpdqx" (OuterVolumeSpecName: "kube-api-access-bpdqx") pod "d9011473-96a7-40eb-ab1b-5cf0758991e3" (UID: "d9011473-96a7-40eb-ab1b-5cf0758991e3"). InnerVolumeSpecName "kube-api-access-bpdqx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.227476 4690 scope.go:117] "RemoveContainer" containerID="8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c" Mar 20 13:45:49 crc kubenswrapper[4690]: E0320 13:45:49.227883 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c\": container with ID starting with 8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c not found: ID does not exist" containerID="8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.227910 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c"} err="failed to get container status \"8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c\": rpc error: code = NotFound desc = could not find container \"8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c\": container with ID starting with 8661b99bf0099a536e810add146498de2205773c370662959c458ed1bc7cea1c not found: ID does not exist" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.227928 4690 scope.go:117] "RemoveContainer" containerID="07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.252838 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-config-data" (OuterVolumeSpecName: "config-data") pod "d9011473-96a7-40eb-ab1b-5cf0758991e3" (UID: "d9011473-96a7-40eb-ab1b-5cf0758991e3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.255109 4690 scope.go:117] "RemoveContainer" containerID="a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.256128 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.269795 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d9011473-96a7-40eb-ab1b-5cf0758991e3" (UID: "d9011473-96a7-40eb-ab1b-5cf0758991e3"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.272765 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.285174 4690 scope.go:117] "RemoveContainer" containerID="07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def" Mar 20 13:45:49 crc kubenswrapper[4690]: E0320 13:45:49.285684 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def\": container with ID starting with 07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def not found: ID does not exist" containerID="07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.285758 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def"} err="failed to get container status \"07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def\": rpc error: code = NotFound desc = could not find container \"07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def\": container with ID starting with 07df821bc617ff39f87981b83c0ea66998ef9dfba9620db238e6914199639def not found: ID does not exist" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.285796 4690 scope.go:117] "RemoveContainer" containerID="a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d" Mar 20 13:45:49 crc kubenswrapper[4690]: E0320 13:45:49.286177 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d\": container with ID starting with a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d not found: ID does not exist" containerID="a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.286229 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d"} err="failed to get container status \"a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d\": rpc error: code = NotFound desc = could not find container \"a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d\": container with ID starting with a7433ee2f465fbe5941f00e3bbeb8ddfd33188af3127a76ba1c0071abf7a382d not found: ID does not exist" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.289196 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: E0320 13:45:49.289728 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerName="nova-metadata-log" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.289745 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerName="nova-metadata-log" Mar 20 13:45:49 crc kubenswrapper[4690]: E0320 13:45:49.289756 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerName="nova-metadata-metadata" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.289764 4690 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerName="nova-metadata-metadata" Mar 20 13:45:49 crc kubenswrapper[4690]: E0320 13:45:49.289776 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9011473-96a7-40eb-ab1b-5cf0758991e3" containerName="nova-cell1-novncproxy-novncproxy" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.289784 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9011473-96a7-40eb-ab1b-5cf0758991e3" containerName="nova-cell1-novncproxy-novncproxy" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.290019 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerName="nova-metadata-metadata" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.290048 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" containerName="nova-metadata-log" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.290065 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9011473-96a7-40eb-ab1b-5cf0758991e3" containerName="nova-cell1-novncproxy-novncproxy" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.291194 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.293990 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.294266 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.302116 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.310511 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.310548 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.310576 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-config-data\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.310614 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sr7l\" (UniqueName: \"kubernetes.io/projected/54194cca-39d2-42a4-9bc6-dc35f2e763fe-kube-api-access-9sr7l\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.310715 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/54194cca-39d2-42a4-9bc6-dc35f2e763fe-logs\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.310942 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bpdqx\" (UniqueName: \"kubernetes.io/projected/d9011473-96a7-40eb-ab1b-5cf0758991e3-kube-api-access-bpdqx\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.310966 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.310977 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d9011473-96a7-40eb-ab1b-5cf0758991e3-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.377861 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.411827 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54194cca-39d2-42a4-9bc6-dc35f2e763fe-logs\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.412040 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.412065 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.412297 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54194cca-39d2-42a4-9bc6-dc35f2e763fe-logs\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.412679 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-config-data\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.412892 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sr7l\" (UniqueName: \"kubernetes.io/projected/54194cca-39d2-42a4-9bc6-dc35f2e763fe-kube-api-access-9sr7l\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.417752 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.418339 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-config-data\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.418371 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.431101 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sr7l\" (UniqueName: \"kubernetes.io/projected/54194cca-39d2-42a4-9bc6-dc35f2e763fe-kube-api-access-9sr7l\") pod \"nova-metadata-0\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.480517 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.522230 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.532755 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.544430 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.545607 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.555327 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.555447 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.555586 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.564870 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.613938 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.617833 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkdj5\" (UniqueName: \"kubernetes.io/projected/2c33f4d2-a43a-427f-bd35-86c011e752e6-kube-api-access-kkdj5\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.618026 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.618209 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.618346 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.618585 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.719826 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.719959 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.720008 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkdj5\" (UniqueName: \"kubernetes.io/projected/2c33f4d2-a43a-427f-bd35-86c011e752e6-kube-api-access-kkdj5\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.720029 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" 
(UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.720071 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.726026 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.727698 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.728379 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.730201 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2c33f4d2-a43a-427f-bd35-86c011e752e6-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.747816 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkdj5\" (UniqueName: \"kubernetes.io/projected/2c33f4d2-a43a-427f-bd35-86c011e752e6-kube-api-access-kkdj5\") pod \"nova-cell1-novncproxy-0\" (UID: \"2c33f4d2-a43a-427f-bd35-86c011e752e6\") " pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:49 crc kubenswrapper[4690]: I0320 13:45:49.815285 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.049042 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.225745 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54194cca-39d2-42a4-9bc6-dc35f2e763fe","Type":"ContainerStarted","Data":"d8fa1083f66c1f32ef327d783429ec82fcddc21e2993b92c074dc03c0b37a8a7"} Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.226181 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54194cca-39d2-42a4-9bc6-dc35f2e763fe","Type":"ContainerStarted","Data":"0f1fa142b11d8e895f43c166c94eb2bcf368dca93aac11e704b7fd2d7def23b0"} Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.227433 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerStarted","Data":"fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa"} Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.227453 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerStarted","Data":"46024b2a199f6f28746b7c6cc6c30dbdbef85374aa6c6f925b4a0dba38112806"} Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.282541 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.424023 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a64189b3-3670-4924-ab49-495e4e8723dc" path="/var/lib/kubelet/pods/a64189b3-3670-4924-ab49-495e4e8723dc/volumes" Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.425060 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9011473-96a7-40eb-ab1b-5cf0758991e3" path="/var/lib/kubelet/pods/d9011473-96a7-40eb-ab1b-5cf0758991e3/volumes" Mar 20 13:45:50 crc kubenswrapper[4690]: I0320 13:45:50.425687 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e58b84ab-69a5-4737-afda-6293d71a1daf" path="/var/lib/kubelet/pods/e58b84ab-69a5-4737-afda-6293d71a1daf/volumes" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.247627 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54194cca-39d2-42a4-9bc6-dc35f2e763fe","Type":"ContainerStarted","Data":"cf65bfa25af093e923e8b70315be4798ae990b3772e2aa12319cab9b96a6540e"} Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.253387 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2c33f4d2-a43a-427f-bd35-86c011e752e6","Type":"ContainerStarted","Data":"6ba7731567e226de22a305b8696740a50cc1a329d63498755997a4030b58d1c1"} Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.253429 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2c33f4d2-a43a-427f-bd35-86c011e752e6","Type":"ContainerStarted","Data":"edf1a9e269aeb654e3a26a4c45b96c4b30f78ff7085e6814f893c455908a1a7e"} Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.258826 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerStarted","Data":"e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d"} Mar 20 
13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.306115 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.306094013 podStartE2EDuration="2.306094013s" podCreationTimestamp="2026-03-20 13:45:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:51.277181658 +0000 UTC m=+1397.566781601" watchObservedRunningTime="2026-03-20 13:45:51.306094013 +0000 UTC m=+1397.595693986" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.310419 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.310401376 podStartE2EDuration="2.310401376s" podCreationTimestamp="2026-03-20 13:45:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:51.303800518 +0000 UTC m=+1397.593400461" watchObservedRunningTime="2026-03-20 13:45:51.310401376 +0000 UTC m=+1397.600001319" Mar 20 13:45:51 crc kubenswrapper[4690]: W0320 13:45:51.397128 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda64189b3_3670_4924_ab49_495e4e8723dc.slice/crio-bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a.scope WatchSource:0}: Error finding container bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a: Status 404 returned error can't find the container with id bac1ab9927d63b5edd12eef3000cd1f9407828d2cf576825120b912541cc4b7a Mar 20 13:45:51 crc kubenswrapper[4690]: W0320 13:45:51.398053 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda64189b3_3670_4924_ab49_495e4e8723dc.slice/crio-f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39.scope WatchSource:0}: Error finding container f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39: Status 404 returned error can't find the container with id f9179d08a1f46695b6cd2065c0a60d67972a526f8cddc519ab4116a2dbaa5f39 Mar 20 13:45:51 crc kubenswrapper[4690]: W0320 13:45:51.402234 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda64189b3_3670_4924_ab49_495e4e8723dc.slice/crio-86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4.scope WatchSource:0}: Error finding container 86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4: Status 404 returned error can't find the container with id 86ee7e958affb55058b0566b9072012420e2d67f3e9c6c6c0a22a85d6b7060c4 Mar 20 13:45:51 crc kubenswrapper[4690]: E0320 13:45:51.684305 4690 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc173b57_1e6f_43a4_a292_a6c6eeb56025.slice/crio-b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9011473_96a7_40eb_ab1b_5cf0758991e3.slice/crio-53c8946a2c440e153003b853f4e7d5285c8365f5f2abe3349dcfce72d15dd67c\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd9011473_96a7_40eb_ab1b_5cf0758991e3.slice\": RecentStats: unable to find data 
in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfc173b57_1e6f_43a4_a292_a6c6eeb56025.slice/crio-conmon-b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c.scope\": RecentStats: unable to find data in memory cache]" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.834633 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.864606 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-config-data\") pod \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.864753 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rrlqg\" (UniqueName: \"kubernetes.io/projected/fc173b57-1e6f-43a4-a292-a6c6eeb56025-kube-api-access-rrlqg\") pod \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.864789 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-combined-ca-bundle\") pod \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.864843 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc173b57-1e6f-43a4-a292-a6c6eeb56025-logs\") pod \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\" (UID: \"fc173b57-1e6f-43a4-a292-a6c6eeb56025\") " Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.865837 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fc173b57-1e6f-43a4-a292-a6c6eeb56025-logs" (OuterVolumeSpecName: "logs") pod "fc173b57-1e6f-43a4-a292-a6c6eeb56025" (UID: "fc173b57-1e6f-43a4-a292-a6c6eeb56025"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.877161 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fc173b57-1e6f-43a4-a292-a6c6eeb56025-kube-api-access-rrlqg" (OuterVolumeSpecName: "kube-api-access-rrlqg") pod "fc173b57-1e6f-43a4-a292-a6c6eeb56025" (UID: "fc173b57-1e6f-43a4-a292-a6c6eeb56025"). InnerVolumeSpecName "kube-api-access-rrlqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.914066 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-config-data" (OuterVolumeSpecName: "config-data") pod "fc173b57-1e6f-43a4-a292-a6c6eeb56025" (UID: "fc173b57-1e6f-43a4-a292-a6c6eeb56025"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.914422 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "fc173b57-1e6f-43a4-a292-a6c6eeb56025" (UID: "fc173b57-1e6f-43a4-a292-a6c6eeb56025"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.967445 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.967719 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rrlqg\" (UniqueName: \"kubernetes.io/projected/fc173b57-1e6f-43a4-a292-a6c6eeb56025-kube-api-access-rrlqg\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.967729 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fc173b57-1e6f-43a4-a292-a6c6eeb56025-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:51 crc kubenswrapper[4690]: I0320 13:45:51.967738 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fc173b57-1e6f-43a4-a292-a6c6eeb56025-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.267980 4690 generic.go:334] "Generic (PLEG): container finished" podID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerID="b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c" exitCode=0 Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.268037 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.268060 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fc173b57-1e6f-43a4-a292-a6c6eeb56025","Type":"ContainerDied","Data":"b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c"} Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.268088 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"fc173b57-1e6f-43a4-a292-a6c6eeb56025","Type":"ContainerDied","Data":"025497c4115389f210586a80150d764fa2c3305362fc961d1c04563c6f9d7ca2"} Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.268105 4690 scope.go:117] "RemoveContainer" containerID="b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.270488 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerStarted","Data":"c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9"} Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.297146 4690 scope.go:117] "RemoveContainer" containerID="dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.310097 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.319537 4690 scope.go:117] "RemoveContainer" containerID="b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c" Mar 20 13:45:52 crc kubenswrapper[4690]: E0320 13:45:52.319984 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c\": container with ID starting with b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c not found: ID does not exist" 
containerID="b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.320022 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c"} err="failed to get container status \"b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c\": rpc error: code = NotFound desc = could not find container \"b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c\": container with ID starting with b23f519e591ac771ab3cecb1eb222b49e6df8fb9dd8b78413983b44e11e1225c not found: ID does not exist" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.320049 4690 scope.go:117] "RemoveContainer" containerID="dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651" Mar 20 13:45:52 crc kubenswrapper[4690]: E0320 13:45:52.320465 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651\": container with ID starting with dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651 not found: ID does not exist" containerID="dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.320493 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651"} err="failed to get container status \"dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651\": rpc error: code = NotFound desc = could not find container \"dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651\": container with ID starting with dca982e03aeed48966e3e9ccc7c0778ae5c7c3064adfcfd1276dab978e304651 not found: ID does not exist" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.323002 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.340428 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:52 crc kubenswrapper[4690]: E0320 13:45:52.340891 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-log" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.340916 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-log" Mar 20 13:45:52 crc kubenswrapper[4690]: E0320 13:45:52.340936 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-api" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.340943 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-api" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.341185 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-log" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.341377 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" containerName="nova-api-api" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.342571 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.344962 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.345315 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.345388 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.358403 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.393934 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-config-data\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.394137 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-public-tls-certs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.394252 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.394392 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nffsp\" (UniqueName: \"kubernetes.io/projected/8052dc48-751a-41d4-9af8-6cdc16beb21d-kube-api-access-nffsp\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.394488 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8052dc48-751a-41d4-9af8-6cdc16beb21d-logs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.394637 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.429049 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fc173b57-1e6f-43a4-a292-a6c6eeb56025" path="/var/lib/kubelet/pods/fc173b57-1e6f-43a4-a292-a6c6eeb56025/volumes" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.496372 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nffsp\" (UniqueName: \"kubernetes.io/projected/8052dc48-751a-41d4-9af8-6cdc16beb21d-kube-api-access-nffsp\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 
13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.496771 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8052dc48-751a-41d4-9af8-6cdc16beb21d-logs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.496823 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.496889 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-config-data\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.496972 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-public-tls-certs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.497019 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.499003 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8052dc48-751a-41d4-9af8-6cdc16beb21d-logs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.501515 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-config-data\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.501627 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.502154 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.502754 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-public-tls-certs\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.512414 4690 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-nffsp\" (UniqueName: \"kubernetes.io/projected/8052dc48-751a-41d4-9af8-6cdc16beb21d-kube-api-access-nffsp\") pod \"nova-api-0\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " pod="openstack/nova-api-0" Mar 20 13:45:52 crc kubenswrapper[4690]: I0320 13:45:52.700654 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:45:53 crc kubenswrapper[4690]: W0320 13:45:53.195064 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8052dc48_751a_41d4_9af8_6cdc16beb21d.slice/crio-18047acb674e7a7d7b0d7fb7308f17e7f7bdc41ca2b4a65f6bc113693948a9d1 WatchSource:0}: Error finding container 18047acb674e7a7d7b0d7fb7308f17e7f7bdc41ca2b4a65f6bc113693948a9d1: Status 404 returned error can't find the container with id 18047acb674e7a7d7b0d7fb7308f17e7f7bdc41ca2b4a65f6bc113693948a9d1 Mar 20 13:45:53 crc kubenswrapper[4690]: I0320 13:45:53.202583 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:45:53 crc kubenswrapper[4690]: I0320 13:45:53.279043 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8052dc48-751a-41d4-9af8-6cdc16beb21d","Type":"ContainerStarted","Data":"18047acb674e7a7d7b0d7fb7308f17e7f7bdc41ca2b4a65f6bc113693948a9d1"} Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.299014 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8052dc48-751a-41d4-9af8-6cdc16beb21d","Type":"ContainerStarted","Data":"7cffff86788f60940a1792daa55d1752197add35930c7d487367a4d90df4f056"} Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.300162 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8052dc48-751a-41d4-9af8-6cdc16beb21d","Type":"ContainerStarted","Data":"65d13e6f22c58f517425c342af9294a86067b0d1bb4ecc0e7e8547c83629f775"} Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.308456 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerStarted","Data":"1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5"} Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.308811 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="ceilometer-central-agent" containerID="cri-o://fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa" gracePeriod=30 Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.311413 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.311529 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="proxy-httpd" containerID="cri-o://1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5" gracePeriod=30 Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.311673 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="sg-core" containerID="cri-o://c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9" gracePeriod=30 Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 
13:45:54.311724 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="ceilometer-notification-agent" containerID="cri-o://e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d" gracePeriod=30 Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.334691 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.334666456 podStartE2EDuration="2.334666456s" podCreationTimestamp="2026-03-20 13:45:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:45:54.317777704 +0000 UTC m=+1400.607377667" watchObservedRunningTime="2026-03-20 13:45:54.334666456 +0000 UTC m=+1400.624266409" Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.370961 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.171372407 podStartE2EDuration="6.37094148s" podCreationTimestamp="2026-03-20 13:45:48 +0000 UTC" firstStartedPulling="2026-03-20 13:45:49.491442374 +0000 UTC m=+1395.781042317" lastFinishedPulling="2026-03-20 13:45:53.691011437 +0000 UTC m=+1399.980611390" observedRunningTime="2026-03-20 13:45:54.354017738 +0000 UTC m=+1400.643617721" watchObservedRunningTime="2026-03-20 13:45:54.37094148 +0000 UTC m=+1400.660541423" Mar 20 13:45:54 crc kubenswrapper[4690]: I0320 13:45:54.815415 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.323617 4690 generic.go:334] "Generic (PLEG): container finished" podID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerID="1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5" exitCode=0 Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.323941 4690 generic.go:334] "Generic (PLEG): container finished" podID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerID="c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9" exitCode=2 Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.323686 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerDied","Data":"1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5"} Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.323953 4690 generic.go:334] "Generic (PLEG): container finished" podID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerID="e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d" exitCode=0 Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.324024 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerDied","Data":"c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9"} Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.324159 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerDied","Data":"e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d"} Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.682067 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-cd5cbd7b9-qk7qx" Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.751557 4690 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-wq9qf"] Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.751815 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" podUID="241eb71a-d9c7-4281-a71c-ed845166f03c" containerName="dnsmasq-dns" containerID="cri-o://864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526" gracePeriod=10 Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.849902 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.968553 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-ceilometer-tls-certs\") pod \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.968604 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-combined-ca-bundle\") pod \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.968661 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-log-httpd\") pod \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.969592 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-run-httpd\") pod \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.969626 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-scripts\") pod \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.969651 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-config-data\") pod \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.969724 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-sg-core-conf-yaml\") pod \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.969757 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85pwr\" (UniqueName: \"kubernetes.io/projected/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-kube-api-access-85pwr\") pod \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\" (UID: \"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd\") " Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.969899 4690 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" (UID: "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.969889 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" (UID: "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.970445 4690 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-run-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.970463 4690 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-log-httpd\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.975429 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-scripts" (OuterVolumeSpecName: "scripts") pod "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" (UID: "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:55 crc kubenswrapper[4690]: I0320 13:45:55.980124 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-kube-api-access-85pwr" (OuterVolumeSpecName: "kube-api-access-85pwr") pod "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" (UID: "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd"). InnerVolumeSpecName "kube-api-access-85pwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.002787 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" (UID: "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.037812 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" (UID: "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.072198 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.072273 4690 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.072288 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85pwr\" (UniqueName: \"kubernetes.io/projected/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-kube-api-access-85pwr\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.072300 4690 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.088713 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" (UID: "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.108537 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-config-data" (OuterVolumeSpecName: "config-data") pod "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" (UID: "d8611fb6-d58e-4a08-9d89-c8efb65e5dcd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.154410 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.174698 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.174737 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.276189 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-config\") pod \"241eb71a-d9c7-4281-a71c-ed845166f03c\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.276423 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98wsp\" (UniqueName: \"kubernetes.io/projected/241eb71a-d9c7-4281-a71c-ed845166f03c-kube-api-access-98wsp\") pod \"241eb71a-d9c7-4281-a71c-ed845166f03c\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.277011 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-svc\") pod \"241eb71a-d9c7-4281-a71c-ed845166f03c\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.277089 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-swift-storage-0\") pod \"241eb71a-d9c7-4281-a71c-ed845166f03c\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.277152 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-sb\") pod \"241eb71a-d9c7-4281-a71c-ed845166f03c\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.277253 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-nb\") pod \"241eb71a-d9c7-4281-a71c-ed845166f03c\" (UID: \"241eb71a-d9c7-4281-a71c-ed845166f03c\") " Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.279389 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/241eb71a-d9c7-4281-a71c-ed845166f03c-kube-api-access-98wsp" (OuterVolumeSpecName: "kube-api-access-98wsp") pod "241eb71a-d9c7-4281-a71c-ed845166f03c" (UID: "241eb71a-d9c7-4281-a71c-ed845166f03c"). InnerVolumeSpecName "kube-api-access-98wsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.321212 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "241eb71a-d9c7-4281-a71c-ed845166f03c" (UID: "241eb71a-d9c7-4281-a71c-ed845166f03c"). 
InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.321342 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "241eb71a-d9c7-4281-a71c-ed845166f03c" (UID: "241eb71a-d9c7-4281-a71c-ed845166f03c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.321982 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-config" (OuterVolumeSpecName: "config") pod "241eb71a-d9c7-4281-a71c-ed845166f03c" (UID: "241eb71a-d9c7-4281-a71c-ed845166f03c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.336519 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "241eb71a-d9c7-4281-a71c-ed845166f03c" (UID: "241eb71a-d9c7-4281-a71c-ed845166f03c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.339397 4690 generic.go:334] "Generic (PLEG): container finished" podID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerID="fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa" exitCode=0 Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.339464 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.339488 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerDied","Data":"fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa"} Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.339551 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"d8611fb6-d58e-4a08-9d89-c8efb65e5dcd","Type":"ContainerDied","Data":"46024b2a199f6f28746b7c6cc6c30dbdbef85374aa6c6f925b4a0dba38112806"} Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.339628 4690 scope.go:117] "RemoveContainer" containerID="1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.341856 4690 generic.go:334] "Generic (PLEG): container finished" podID="241eb71a-d9c7-4281-a71c-ed845166f03c" containerID="864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526" exitCode=0 Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.342827 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.343015 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" event={"ID":"241eb71a-d9c7-4281-a71c-ed845166f03c","Type":"ContainerDied","Data":"864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526"} Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.343037 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-bccf8f775-wq9qf" event={"ID":"241eb71a-d9c7-4281-a71c-ed845166f03c","Type":"ContainerDied","Data":"e746e26c02573aa486fd56cf9d5295bf689e22d6192d632921c511c814cacdc4"} Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.350283 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "241eb71a-d9c7-4281-a71c-ed845166f03c" (UID: "241eb71a-d9c7-4281-a71c-ed845166f03c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.377602 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.380628 4690 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-svc\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.380654 4690 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.380664 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.380672 4690 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.380680 4690 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/241eb71a-d9c7-4281-a71c-ed845166f03c-config\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.380689 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98wsp\" (UniqueName: \"kubernetes.io/projected/241eb71a-d9c7-4281-a71c-ed845166f03c-kube-api-access-98wsp\") on node \"crc\" DevicePath \"\"" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.382906 4690 scope.go:117] "RemoveContainer" containerID="c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.400636 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.412009 4690 scope.go:117] "RemoveContainer" containerID="e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.438255 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" path="/var/lib/kubelet/pods/d8611fb6-d58e-4a08-9d89-c8efb65e5dcd/volumes" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439365 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.439636 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="proxy-httpd" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439652 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="proxy-httpd" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.439672 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="ceilometer-notification-agent" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439680 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="ceilometer-notification-agent" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.439694 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="sg-core" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439700 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="sg-core" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.439715 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="ceilometer-central-agent" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439721 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="ceilometer-central-agent" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.439732 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="241eb71a-d9c7-4281-a71c-ed845166f03c" containerName="init" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439738 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="241eb71a-d9c7-4281-a71c-ed845166f03c" containerName="init" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.439750 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="241eb71a-d9c7-4281-a71c-ed845166f03c" containerName="dnsmasq-dns" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439756 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="241eb71a-d9c7-4281-a71c-ed845166f03c" containerName="dnsmasq-dns" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439956 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="ceilometer-central-agent" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439974 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="proxy-httpd" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439988 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="ceilometer-notification-agent" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.439999 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="241eb71a-d9c7-4281-a71c-ed845166f03c" containerName="dnsmasq-dns" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.440007 4690 
memory_manager.go:354] "RemoveStaleState removing state" podUID="d8611fb6-d58e-4a08-9d89-c8efb65e5dcd" containerName="sg-core" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.442379 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.442471 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.447180 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.447330 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.447389 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.449415 4690 scope.go:117] "RemoveContainer" containerID="fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.472815 4690 scope.go:117] "RemoveContainer" containerID="1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.473384 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5\": container with ID starting with 1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5 not found: ID does not exist" containerID="1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.473424 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5"} err="failed to get container status \"1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5\": rpc error: code = NotFound desc = could not find container \"1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5\": container with ID starting with 1893285ddea527065dba9eee60636fca19eab19248e202e4ca132b42207fd5c5 not found: ID does not exist" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.473451 4690 scope.go:117] "RemoveContainer" containerID="c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.473706 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9\": container with ID starting with c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9 not found: ID does not exist" containerID="c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.473731 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9"} err="failed to get container status \"c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9\": rpc error: code = NotFound desc = could not find container \"c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9\": container with ID starting with 
c62f362b8866d823c44f26cac47c6b3cb22d6fefd3644cb10068709445f27ed9 not found: ID does not exist" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.473746 4690 scope.go:117] "RemoveContainer" containerID="e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.473986 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d\": container with ID starting with e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d not found: ID does not exist" containerID="e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.474010 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d"} err="failed to get container status \"e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d\": rpc error: code = NotFound desc = could not find container \"e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d\": container with ID starting with e97619cecf13bb823b3c769f611aebef6f4ed654f8755e8f82ef6e69ac2a583d not found: ID does not exist" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.474024 4690 scope.go:117] "RemoveContainer" containerID="fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.474217 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa\": container with ID starting with fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa not found: ID does not exist" containerID="fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.474239 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa"} err="failed to get container status \"fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa\": rpc error: code = NotFound desc = could not find container \"fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa\": container with ID starting with fabd1cfd1826342dc0a0f62e7c7a653e2edf1b55f26264f4a6532168992a85fa not found: ID does not exist" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.474252 4690 scope.go:117] "RemoveContainer" containerID="864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.483945 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.484062 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-scripts\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.484173 
4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.484195 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c121e50a-142e-42c2-b5a9-6d569d18176e-run-httpd\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.484253 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-config-data\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.484297 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jc5nm\" (UniqueName: \"kubernetes.io/projected/c121e50a-142e-42c2-b5a9-6d569d18176e-kube-api-access-jc5nm\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.484326 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.484349 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c121e50a-142e-42c2-b5a9-6d569d18176e-log-httpd\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.498834 4690 scope.go:117] "RemoveContainer" containerID="583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.519201 4690 scope.go:117] "RemoveContainer" containerID="864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.519636 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526\": container with ID starting with 864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526 not found: ID does not exist" containerID="864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.519680 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526"} err="failed to get container status \"864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526\": rpc error: code = NotFound desc = could not find container \"864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526\": container with ID starting with 
864c356eac392fda810ecc8cccfba3bca0f0f744aab7fe496014ac0cdacb1526 not found: ID does not exist" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.519706 4690 scope.go:117] "RemoveContainer" containerID="583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e" Mar 20 13:45:56 crc kubenswrapper[4690]: E0320 13:45:56.521042 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e\": container with ID starting with 583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e not found: ID does not exist" containerID="583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.521076 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e"} err="failed to get container status \"583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e\": rpc error: code = NotFound desc = could not find container \"583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e\": container with ID starting with 583449fe37fbf5264e5ef884d08faebbae930ef635f9c262fbeb4f2f2d791b6e not found: ID does not exist" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.586574 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-scripts\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.587011 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.587276 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c121e50a-142e-42c2-b5a9-6d569d18176e-run-httpd\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.587421 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-config-data\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.587646 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jc5nm\" (UniqueName: \"kubernetes.io/projected/c121e50a-142e-42c2-b5a9-6d569d18176e-kube-api-access-jc5nm\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.588540 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.588670 4690 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c121e50a-142e-42c2-b5a9-6d569d18176e-log-httpd\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.588833 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.588336 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c121e50a-142e-42c2-b5a9-6d569d18176e-run-httpd\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.589626 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c121e50a-142e-42c2-b5a9-6d569d18176e-log-httpd\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.590213 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.591466 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-config-data\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.596179 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.596433 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-scripts\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.599592 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c121e50a-142e-42c2-b5a9-6d569d18176e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.606351 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jc5nm\" (UniqueName: \"kubernetes.io/projected/c121e50a-142e-42c2-b5a9-6d569d18176e-kube-api-access-jc5nm\") pod \"ceilometer-0\" (UID: \"c121e50a-142e-42c2-b5a9-6d569d18176e\") " pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.761315 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/dnsmasq-dns-bccf8f775-wq9qf"] Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.767260 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Mar 20 13:45:56 crc kubenswrapper[4690]: I0320 13:45:56.772963 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-bccf8f775-wq9qf"] Mar 20 13:45:57 crc kubenswrapper[4690]: I0320 13:45:57.309790 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Mar 20 13:45:57 crc kubenswrapper[4690]: W0320 13:45:57.315569 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc121e50a_142e_42c2_b5a9_6d569d18176e.slice/crio-e707c51cef37912fa48b8ae3cd83552296bc3d42ac36582a55a70ccfb77eb094 WatchSource:0}: Error finding container e707c51cef37912fa48b8ae3cd83552296bc3d42ac36582a55a70ccfb77eb094: Status 404 returned error can't find the container with id e707c51cef37912fa48b8ae3cd83552296bc3d42ac36582a55a70ccfb77eb094 Mar 20 13:45:57 crc kubenswrapper[4690]: I0320 13:45:57.351572 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c121e50a-142e-42c2-b5a9-6d569d18176e","Type":"ContainerStarted","Data":"e707c51cef37912fa48b8ae3cd83552296bc3d42ac36582a55a70ccfb77eb094"} Mar 20 13:45:58 crc kubenswrapper[4690]: I0320 13:45:58.365075 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c121e50a-142e-42c2-b5a9-6d569d18176e","Type":"ContainerStarted","Data":"ce321b5e39d5beea12a2da70ff9c39ff91551b863cf8fa1eb53fc41b15945718"} Mar 20 13:45:58 crc kubenswrapper[4690]: I0320 13:45:58.460987 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="241eb71a-d9c7-4281-a71c-ed845166f03c" path="/var/lib/kubelet/pods/241eb71a-d9c7-4281-a71c-ed845166f03c/volumes" Mar 20 13:45:59 crc kubenswrapper[4690]: I0320 13:45:59.386258 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c121e50a-142e-42c2-b5a9-6d569d18176e","Type":"ContainerStarted","Data":"ee89ca72dd3fd6fef6b7e64201a58e863b28b3fe48f62e63d1563bb63488581d"} Mar 20 13:45:59 crc kubenswrapper[4690]: I0320 13:45:59.386656 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c121e50a-142e-42c2-b5a9-6d569d18176e","Type":"ContainerStarted","Data":"2bbf29204e5ec54307463291643c9b629702329a92ade1472070bd2455d2eb7b"} Mar 20 13:45:59 crc kubenswrapper[4690]: I0320 13:45:59.615163 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Mar 20 13:45:59 crc kubenswrapper[4690]: I0320 13:45:59.615546 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Mar 20 13:45:59 crc kubenswrapper[4690]: I0320 13:45:59.816523 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:45:59 crc kubenswrapper[4690]: I0320 13:45:59.836926 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.139487 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566906-ncdqs"] Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.141225 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566906-ncdqs" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.145220 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.145467 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.145904 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.157329 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566906-ncdqs"] Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.278764 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvd5m\" (UniqueName: \"kubernetes.io/projected/7c007960-0fbe-471e-95a9-fecd3bcfb0f5-kube-api-access-pvd5m\") pod \"auto-csr-approver-29566906-ncdqs\" (UID: \"7c007960-0fbe-471e-95a9-fecd3bcfb0f5\") " pod="openshift-infra/auto-csr-approver-29566906-ncdqs" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.380986 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvd5m\" (UniqueName: \"kubernetes.io/projected/7c007960-0fbe-471e-95a9-fecd3bcfb0f5-kube-api-access-pvd5m\") pod \"auto-csr-approver-29566906-ncdqs\" (UID: \"7c007960-0fbe-471e-95a9-fecd3bcfb0f5\") " pod="openshift-infra/auto-csr-approver-29566906-ncdqs" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.406641 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvd5m\" (UniqueName: \"kubernetes.io/projected/7c007960-0fbe-471e-95a9-fecd3bcfb0f5-kube-api-access-pvd5m\") pod \"auto-csr-approver-29566906-ncdqs\" (UID: \"7c007960-0fbe-471e-95a9-fecd3bcfb0f5\") " pod="openshift-infra/auto-csr-approver-29566906-ncdqs" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.433080 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.472946 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566906-ncdqs" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.633272 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-fmsn9"] Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.634818 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.636863 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.637444 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.637746 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.637699 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.647724 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-fmsn9"] Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.791876 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-config-data\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.791951 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-scripts\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.792018 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v752t\" (UniqueName: \"kubernetes.io/projected/ce04bc8c-c482-4ff3-a0b5-303db0874640-kube-api-access-v752t\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.792082 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.893519 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.893600 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-config-data\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.893663 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-scripts\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.893736 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v752t\" (UniqueName: \"kubernetes.io/projected/ce04bc8c-c482-4ff3-a0b5-303db0874640-kube-api-access-v752t\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.898184 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-config-data\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.898473 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-scripts\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.907573 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.909241 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v752t\" (UniqueName: \"kubernetes.io/projected/ce04bc8c-c482-4ff3-a0b5-303db0874640-kube-api-access-v752t\") pod \"nova-cell1-cell-mapping-fmsn9\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:00 crc kubenswrapper[4690]: I0320 13:46:00.982402 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:01 crc kubenswrapper[4690]: I0320 13:46:01.039775 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566906-ncdqs"] Mar 20 13:46:01 crc kubenswrapper[4690]: W0320 13:46:01.055222 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c007960_0fbe_471e_95a9_fecd3bcfb0f5.slice/crio-af6f461f6554126d9ee46e40d21734cd97e83a06ceba9615b05d49f6bc6a7f91 WatchSource:0}: Error finding container af6f461f6554126d9ee46e40d21734cd97e83a06ceba9615b05d49f6bc6a7f91: Status 404 returned error can't find the container with id af6f461f6554126d9ee46e40d21734cd97e83a06ceba9615b05d49f6bc6a7f91 Mar 20 13:46:01 crc kubenswrapper[4690]: I0320 13:46:01.409429 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566906-ncdqs" event={"ID":"7c007960-0fbe-471e-95a9-fecd3bcfb0f5","Type":"ContainerStarted","Data":"af6f461f6554126d9ee46e40d21734cd97e83a06ceba9615b05d49f6bc6a7f91"} Mar 20 13:46:01 crc kubenswrapper[4690]: I0320 13:46:01.452915 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-fmsn9"] Mar 20 13:46:01 crc kubenswrapper[4690]: W0320 13:46:01.475802 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce04bc8c_c482_4ff3_a0b5_303db0874640.slice/crio-067883712e6b2e00dc050ee32c1e954b1c86a6345e648a918ee5bd556bcf1a68 WatchSource:0}: Error finding container 067883712e6b2e00dc050ee32c1e954b1c86a6345e648a918ee5bd556bcf1a68: Status 404 returned error can't find the container with id 067883712e6b2e00dc050ee32c1e954b1c86a6345e648a918ee5bd556bcf1a68 Mar 20 13:46:02 crc kubenswrapper[4690]: I0320 13:46:02.431662 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-fmsn9" event={"ID":"ce04bc8c-c482-4ff3-a0b5-303db0874640","Type":"ContainerStarted","Data":"140ac62382e0552785029f1f967e060e15393b0ada75296d7b0ad62b768a50b9"} Mar 20 13:46:02 crc kubenswrapper[4690]: I0320 13:46:02.433211 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-fmsn9" event={"ID":"ce04bc8c-c482-4ff3-a0b5-303db0874640","Type":"ContainerStarted","Data":"067883712e6b2e00dc050ee32c1e954b1c86a6345e648a918ee5bd556bcf1a68"} Mar 20 13:46:02 crc kubenswrapper[4690]: I0320 13:46:02.444632 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c121e50a-142e-42c2-b5a9-6d569d18176e","Type":"ContainerStarted","Data":"a8bbcd250a116480750e36e486b7d555cbbd2e2f57bf5c226deb005f48f57043"} Mar 20 13:46:02 crc kubenswrapper[4690]: I0320 13:46:02.445910 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Mar 20 13:46:02 crc kubenswrapper[4690]: I0320 13:46:02.458424 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-fmsn9" podStartSLOduration=2.458403467 podStartE2EDuration="2.458403467s" podCreationTimestamp="2026-03-20 13:46:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:46:02.448643508 +0000 UTC m=+1408.738243471" watchObservedRunningTime="2026-03-20 13:46:02.458403467 +0000 UTC m=+1408.748003420" Mar 20 13:46:02 crc kubenswrapper[4690]: I0320 13:46:02.484524 4690 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.418638481 podStartE2EDuration="6.484507291s" podCreationTimestamp="2026-03-20 13:45:56 +0000 UTC" firstStartedPulling="2026-03-20 13:45:57.318253585 +0000 UTC m=+1403.607853528" lastFinishedPulling="2026-03-20 13:46:01.384122395 +0000 UTC m=+1407.673722338" observedRunningTime="2026-03-20 13:46:02.478723776 +0000 UTC m=+1408.768323799" watchObservedRunningTime="2026-03-20 13:46:02.484507291 +0000 UTC m=+1408.774107234" Mar 20 13:46:02 crc kubenswrapper[4690]: I0320 13:46:02.700826 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Mar 20 13:46:02 crc kubenswrapper[4690]: I0320 13:46:02.701188 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Mar 20 13:46:03 crc kubenswrapper[4690]: I0320 13:46:03.463264 4690 generic.go:334] "Generic (PLEG): container finished" podID="7c007960-0fbe-471e-95a9-fecd3bcfb0f5" containerID="39687065115c8ffb25b940c788e71bc5eb58fcb0c897217f100dbdf8d5b3e004" exitCode=0 Mar 20 13:46:03 crc kubenswrapper[4690]: I0320 13:46:03.464803 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566906-ncdqs" event={"ID":"7c007960-0fbe-471e-95a9-fecd3bcfb0f5","Type":"ContainerDied","Data":"39687065115c8ffb25b940c788e71bc5eb58fcb0c897217f100dbdf8d5b3e004"} Mar 20 13:46:03 crc kubenswrapper[4690]: I0320 13:46:03.714109 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.211:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Mar 20 13:46:03 crc kubenswrapper[4690]: I0320 13:46:03.714154 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.211:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Mar 20 13:46:04 crc kubenswrapper[4690]: I0320 13:46:04.805966 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566906-ncdqs" Mar 20 13:46:04 crc kubenswrapper[4690]: I0320 13:46:04.992270 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pvd5m\" (UniqueName: \"kubernetes.io/projected/7c007960-0fbe-471e-95a9-fecd3bcfb0f5-kube-api-access-pvd5m\") pod \"7c007960-0fbe-471e-95a9-fecd3bcfb0f5\" (UID: \"7c007960-0fbe-471e-95a9-fecd3bcfb0f5\") " Mar 20 13:46:04 crc kubenswrapper[4690]: I0320 13:46:04.999894 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c007960-0fbe-471e-95a9-fecd3bcfb0f5-kube-api-access-pvd5m" (OuterVolumeSpecName: "kube-api-access-pvd5m") pod "7c007960-0fbe-471e-95a9-fecd3bcfb0f5" (UID: "7c007960-0fbe-471e-95a9-fecd3bcfb0f5"). InnerVolumeSpecName "kube-api-access-pvd5m". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:46:05 crc kubenswrapper[4690]: I0320 13:46:05.094613 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pvd5m\" (UniqueName: \"kubernetes.io/projected/7c007960-0fbe-471e-95a9-fecd3bcfb0f5-kube-api-access-pvd5m\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:05 crc kubenswrapper[4690]: I0320 13:46:05.483631 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566906-ncdqs" event={"ID":"7c007960-0fbe-471e-95a9-fecd3bcfb0f5","Type":"ContainerDied","Data":"af6f461f6554126d9ee46e40d21734cd97e83a06ceba9615b05d49f6bc6a7f91"} Mar 20 13:46:05 crc kubenswrapper[4690]: I0320 13:46:05.483682 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="af6f461f6554126d9ee46e40d21734cd97e83a06ceba9615b05d49f6bc6a7f91" Mar 20 13:46:05 crc kubenswrapper[4690]: I0320 13:46:05.483741 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566906-ncdqs" Mar 20 13:46:05 crc kubenswrapper[4690]: I0320 13:46:05.887298 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566900-xlnh4"] Mar 20 13:46:05 crc kubenswrapper[4690]: I0320 13:46:05.895302 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566900-xlnh4"] Mar 20 13:46:06 crc kubenswrapper[4690]: I0320 13:46:06.434555 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92fadff7-ff1d-474b-8043-c0fb44d2e635" path="/var/lib/kubelet/pods/92fadff7-ff1d-474b-8043-c0fb44d2e635/volumes" Mar 20 13:46:06 crc kubenswrapper[4690]: I0320 13:46:06.505045 4690 generic.go:334] "Generic (PLEG): container finished" podID="ce04bc8c-c482-4ff3-a0b5-303db0874640" containerID="140ac62382e0552785029f1f967e060e15393b0ada75296d7b0ad62b768a50b9" exitCode=0 Mar 20 13:46:06 crc kubenswrapper[4690]: I0320 13:46:06.505104 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-fmsn9" event={"ID":"ce04bc8c-c482-4ff3-a0b5-303db0874640","Type":"ContainerDied","Data":"140ac62382e0552785029f1f967e060e15393b0ada75296d7b0ad62b768a50b9"} Mar 20 13:46:07 crc kubenswrapper[4690]: I0320 13:46:07.614168 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Mar 20 13:46:07 crc kubenswrapper[4690]: I0320 13:46:07.614608 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Mar 20 13:46:07 crc kubenswrapper[4690]: I0320 13:46:07.984053 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.156937 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v752t\" (UniqueName: \"kubernetes.io/projected/ce04bc8c-c482-4ff3-a0b5-303db0874640-kube-api-access-v752t\") pod \"ce04bc8c-c482-4ff3-a0b5-303db0874640\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.157069 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-config-data\") pod \"ce04bc8c-c482-4ff3-a0b5-303db0874640\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.157110 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-combined-ca-bundle\") pod \"ce04bc8c-c482-4ff3-a0b5-303db0874640\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.157218 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-scripts\") pod \"ce04bc8c-c482-4ff3-a0b5-303db0874640\" (UID: \"ce04bc8c-c482-4ff3-a0b5-303db0874640\") " Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.165043 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce04bc8c-c482-4ff3-a0b5-303db0874640-kube-api-access-v752t" (OuterVolumeSpecName: "kube-api-access-v752t") pod "ce04bc8c-c482-4ff3-a0b5-303db0874640" (UID: "ce04bc8c-c482-4ff3-a0b5-303db0874640"). InnerVolumeSpecName "kube-api-access-v752t". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.165965 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-scripts" (OuterVolumeSpecName: "scripts") pod "ce04bc8c-c482-4ff3-a0b5-303db0874640" (UID: "ce04bc8c-c482-4ff3-a0b5-303db0874640"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.190186 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce04bc8c-c482-4ff3-a0b5-303db0874640" (UID: "ce04bc8c-c482-4ff3-a0b5-303db0874640"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.193943 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-config-data" (OuterVolumeSpecName: "config-data") pod "ce04bc8c-c482-4ff3-a0b5-303db0874640" (UID: "ce04bc8c-c482-4ff3-a0b5-303db0874640"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.259769 4690 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-scripts\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.259812 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v752t\" (UniqueName: \"kubernetes.io/projected/ce04bc8c-c482-4ff3-a0b5-303db0874640-kube-api-access-v752t\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.259828 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.259840 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce04bc8c-c482-4ff3-a0b5-303db0874640-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.526491 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-fmsn9" event={"ID":"ce04bc8c-c482-4ff3-a0b5-303db0874640","Type":"ContainerDied","Data":"067883712e6b2e00dc050ee32c1e954b1c86a6345e648a918ee5bd556bcf1a68"} Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.526543 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="067883712e6b2e00dc050ee32c1e954b1c86a6345e648a918ee5bd556bcf1a68" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.526543 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-fmsn9" Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.804110 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.805129 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="729f77d5-e919-412b-9c15-eeaf2e56f00b" containerName="nova-scheduler-scheduler" containerID="cri-o://988d7dfb94537d70fbdc98c81bad6806f1c68166f6214a0ed841394acf0ceb5e" gracePeriod=30 Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.832958 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.833942 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-log" containerID="cri-o://d8fa1083f66c1f32ef327d783429ec82fcddc21e2993b92c074dc03c0b37a8a7" gracePeriod=30 Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.833958 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-metadata" containerID="cri-o://cf65bfa25af093e923e8b70315be4798ae990b3772e2aa12319cab9b96a6540e" gracePeriod=30 Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.875209 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.875494 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" 
podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-log" containerID="cri-o://65d13e6f22c58f517425c342af9294a86067b0d1bb4ecc0e7e8547c83629f775" gracePeriod=30 Mar 20 13:46:08 crc kubenswrapper[4690]: I0320 13:46:08.875641 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-api" containerID="cri-o://7cffff86788f60940a1792daa55d1752197add35930c7d487367a4d90df4f056" gracePeriod=30 Mar 20 13:46:09 crc kubenswrapper[4690]: I0320 13:46:09.539066 4690 generic.go:334] "Generic (PLEG): container finished" podID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerID="d8fa1083f66c1f32ef327d783429ec82fcddc21e2993b92c074dc03c0b37a8a7" exitCode=143 Mar 20 13:46:09 crc kubenswrapper[4690]: I0320 13:46:09.539130 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54194cca-39d2-42a4-9bc6-dc35f2e763fe","Type":"ContainerDied","Data":"d8fa1083f66c1f32ef327d783429ec82fcddc21e2993b92c074dc03c0b37a8a7"} Mar 20 13:46:09 crc kubenswrapper[4690]: I0320 13:46:09.542129 4690 generic.go:334] "Generic (PLEG): container finished" podID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerID="65d13e6f22c58f517425c342af9294a86067b0d1bb4ecc0e7e8547c83629f775" exitCode=143 Mar 20 13:46:09 crc kubenswrapper[4690]: I0320 13:46:09.542165 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8052dc48-751a-41d4-9af8-6cdc16beb21d","Type":"ContainerDied","Data":"65d13e6f22c58f517425c342af9294a86067b0d1bb4ecc0e7e8547c83629f775"} Mar 20 13:46:10 crc kubenswrapper[4690]: I0320 13:46:10.558572 4690 generic.go:334] "Generic (PLEG): container finished" podID="729f77d5-e919-412b-9c15-eeaf2e56f00b" containerID="988d7dfb94537d70fbdc98c81bad6806f1c68166f6214a0ed841394acf0ceb5e" exitCode=0 Mar 20 13:46:10 crc kubenswrapper[4690]: I0320 13:46:10.558773 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"729f77d5-e919-412b-9c15-eeaf2e56f00b","Type":"ContainerDied","Data":"988d7dfb94537d70fbdc98c81bad6806f1c68166f6214a0ed841394acf0ceb5e"} Mar 20 13:46:10 crc kubenswrapper[4690]: I0320 13:46:10.701752 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Mar 20 13:46:10 crc kubenswrapper[4690]: I0320 13:46:10.701809 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Mar 20 13:46:10 crc kubenswrapper[4690]: I0320 13:46:10.826798 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.012959 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-combined-ca-bundle\") pod \"729f77d5-e919-412b-9c15-eeaf2e56f00b\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.013097 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-config-data\") pod \"729f77d5-e919-412b-9c15-eeaf2e56f00b\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.013157 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zvjl8\" (UniqueName: \"kubernetes.io/projected/729f77d5-e919-412b-9c15-eeaf2e56f00b-kube-api-access-zvjl8\") pod \"729f77d5-e919-412b-9c15-eeaf2e56f00b\" (UID: \"729f77d5-e919-412b-9c15-eeaf2e56f00b\") " Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.018918 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/729f77d5-e919-412b-9c15-eeaf2e56f00b-kube-api-access-zvjl8" (OuterVolumeSpecName: "kube-api-access-zvjl8") pod "729f77d5-e919-412b-9c15-eeaf2e56f00b" (UID: "729f77d5-e919-412b-9c15-eeaf2e56f00b"). InnerVolumeSpecName "kube-api-access-zvjl8". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.067215 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-config-data" (OuterVolumeSpecName: "config-data") pod "729f77d5-e919-412b-9c15-eeaf2e56f00b" (UID: "729f77d5-e919-412b-9c15-eeaf2e56f00b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.115046 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.115367 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zvjl8\" (UniqueName: \"kubernetes.io/projected/729f77d5-e919-412b-9c15-eeaf2e56f00b-kube-api-access-zvjl8\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.115774 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "729f77d5-e919-412b-9c15-eeaf2e56f00b" (UID: "729f77d5-e919-412b-9c15-eeaf2e56f00b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.217293 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/729f77d5-e919-412b-9c15-eeaf2e56f00b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.571671 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"729f77d5-e919-412b-9c15-eeaf2e56f00b","Type":"ContainerDied","Data":"f5448d554b2189fbe00785a5535b40883ee8f52d971b3b1a02f7d19e992cc068"} Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.571734 4690 scope.go:117] "RemoveContainer" containerID="988d7dfb94537d70fbdc98c81bad6806f1c68166f6214a0ed841394acf0ceb5e" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.571767 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.618096 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.628553 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.653737 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:46:11 crc kubenswrapper[4690]: E0320 13:46:11.654274 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c007960-0fbe-471e-95a9-fecd3bcfb0f5" containerName="oc" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.654300 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c007960-0fbe-471e-95a9-fecd3bcfb0f5" containerName="oc" Mar 20 13:46:11 crc kubenswrapper[4690]: E0320 13:46:11.654347 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce04bc8c-c482-4ff3-a0b5-303db0874640" containerName="nova-manage" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.654359 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce04bc8c-c482-4ff3-a0b5-303db0874640" containerName="nova-manage" Mar 20 13:46:11 crc kubenswrapper[4690]: E0320 13:46:11.654380 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="729f77d5-e919-412b-9c15-eeaf2e56f00b" containerName="nova-scheduler-scheduler" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.654391 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="729f77d5-e919-412b-9c15-eeaf2e56f00b" containerName="nova-scheduler-scheduler" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.654639 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c007960-0fbe-471e-95a9-fecd3bcfb0f5" containerName="oc" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.654663 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce04bc8c-c482-4ff3-a0b5-303db0874640" containerName="nova-manage" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.654675 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="729f77d5-e919-412b-9c15-eeaf2e56f00b" containerName="nova-scheduler-scheduler" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.655454 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.659167 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.665143 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.830490 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6p6k\" (UniqueName: \"kubernetes.io/projected/a20b9cc4-387a-4e80-935e-e94a582e3843-kube-api-access-v6p6k\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.830567 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20b9cc4-387a-4e80-935e-e94a582e3843-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.830661 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20b9cc4-387a-4e80-935e-e94a582e3843-config-data\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.932914 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20b9cc4-387a-4e80-935e-e94a582e3843-config-data\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.933257 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6p6k\" (UniqueName: \"kubernetes.io/projected/a20b9cc4-387a-4e80-935e-e94a582e3843-kube-api-access-v6p6k\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.933301 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20b9cc4-387a-4e80-935e-e94a582e3843-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.937417 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a20b9cc4-387a-4e80-935e-e94a582e3843-config-data\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.938598 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a20b9cc4-387a-4e80-935e-e94a582e3843-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.966263 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6p6k\" (UniqueName: 
\"kubernetes.io/projected/a20b9cc4-387a-4e80-935e-e94a582e3843-kube-api-access-v6p6k\") pod \"nova-scheduler-0\" (UID: \"a20b9cc4-387a-4e80-935e-e94a582e3843\") " pod="openstack/nova-scheduler-0" Mar 20 13:46:11 crc kubenswrapper[4690]: I0320 13:46:11.991854 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Mar 20 13:46:12 crc kubenswrapper[4690]: E0320 13:46:12.193293 4690 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8052dc48_751a_41d4_9af8_6cdc16beb21d.slice/crio-conmon-7cffff86788f60940a1792daa55d1752197add35930c7d487367a4d90df4f056.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8052dc48_751a_41d4_9af8_6cdc16beb21d.slice/crio-7cffff86788f60940a1792daa55d1752197add35930c7d487367a4d90df4f056.scope\": RecentStats: unable to find data in memory cache]" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.430218 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="729f77d5-e919-412b-9c15-eeaf2e56f00b" path="/var/lib/kubelet/pods/729f77d5-e919-412b-9c15-eeaf2e56f00b/volumes" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.585455 4690 generic.go:334] "Generic (PLEG): container finished" podID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerID="cf65bfa25af093e923e8b70315be4798ae990b3772e2aa12319cab9b96a6540e" exitCode=0 Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.586008 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54194cca-39d2-42a4-9bc6-dc35f2e763fe","Type":"ContainerDied","Data":"cf65bfa25af093e923e8b70315be4798ae990b3772e2aa12319cab9b96a6540e"} Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.586045 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"54194cca-39d2-42a4-9bc6-dc35f2e763fe","Type":"ContainerDied","Data":"0f1fa142b11d8e895f43c166c94eb2bcf368dca93aac11e704b7fd2d7def23b0"} Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.586066 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f1fa142b11d8e895f43c166c94eb2bcf368dca93aac11e704b7fd2d7def23b0" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.588447 4690 generic.go:334] "Generic (PLEG): container finished" podID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerID="7cffff86788f60940a1792daa55d1752197add35930c7d487367a4d90df4f056" exitCode=0 Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.588515 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8052dc48-751a-41d4-9af8-6cdc16beb21d","Type":"ContainerDied","Data":"7cffff86788f60940a1792daa55d1752197add35930c7d487367a4d90df4f056"} Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.588573 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8052dc48-751a-41d4-9af8-6cdc16beb21d","Type":"ContainerDied","Data":"18047acb674e7a7d7b0d7fb7308f17e7f7bdc41ca2b4a65f6bc113693948a9d1"} Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.588587 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="18047acb674e7a7d7b0d7fb7308f17e7f7bdc41ca2b4a65f6bc113693948a9d1" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.637138 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.639189 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.667772 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.749094 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-nova-metadata-tls-certs\") pod \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.749525 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sr7l\" (UniqueName: \"kubernetes.io/projected/54194cca-39d2-42a4-9bc6-dc35f2e763fe-kube-api-access-9sr7l\") pod \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.749638 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-combined-ca-bundle\") pod \"8052dc48-751a-41d4-9af8-6cdc16beb21d\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.749729 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8052dc48-751a-41d4-9af8-6cdc16beb21d-logs\") pod \"8052dc48-751a-41d4-9af8-6cdc16beb21d\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.749825 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-config-data\") pod \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.749880 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-internal-tls-certs\") pod \"8052dc48-751a-41d4-9af8-6cdc16beb21d\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.749977 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54194cca-39d2-42a4-9bc6-dc35f2e763fe-logs\") pod \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.750033 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-combined-ca-bundle\") pod \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\" (UID: \"54194cca-39d2-42a4-9bc6-dc35f2e763fe\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.750083 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-config-data\") pod \"8052dc48-751a-41d4-9af8-6cdc16beb21d\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " 
Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.750122 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nffsp\" (UniqueName: \"kubernetes.io/projected/8052dc48-751a-41d4-9af8-6cdc16beb21d-kube-api-access-nffsp\") pod \"8052dc48-751a-41d4-9af8-6cdc16beb21d\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.750160 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-public-tls-certs\") pod \"8052dc48-751a-41d4-9af8-6cdc16beb21d\" (UID: \"8052dc48-751a-41d4-9af8-6cdc16beb21d\") " Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.750418 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8052dc48-751a-41d4-9af8-6cdc16beb21d-logs" (OuterVolumeSpecName: "logs") pod "8052dc48-751a-41d4-9af8-6cdc16beb21d" (UID: "8052dc48-751a-41d4-9af8-6cdc16beb21d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.750516 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/54194cca-39d2-42a4-9bc6-dc35f2e763fe-logs" (OuterVolumeSpecName: "logs") pod "54194cca-39d2-42a4-9bc6-dc35f2e763fe" (UID: "54194cca-39d2-42a4-9bc6-dc35f2e763fe"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.751115 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8052dc48-751a-41d4-9af8-6cdc16beb21d-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.751145 4690 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/54194cca-39d2-42a4-9bc6-dc35f2e763fe-logs\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.753878 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8052dc48-751a-41d4-9af8-6cdc16beb21d-kube-api-access-nffsp" (OuterVolumeSpecName: "kube-api-access-nffsp") pod "8052dc48-751a-41d4-9af8-6cdc16beb21d" (UID: "8052dc48-751a-41d4-9af8-6cdc16beb21d"). InnerVolumeSpecName "kube-api-access-nffsp". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.755695 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/54194cca-39d2-42a4-9bc6-dc35f2e763fe-kube-api-access-9sr7l" (OuterVolumeSpecName: "kube-api-access-9sr7l") pod "54194cca-39d2-42a4-9bc6-dc35f2e763fe" (UID: "54194cca-39d2-42a4-9bc6-dc35f2e763fe"). InnerVolumeSpecName "kube-api-access-9sr7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.776868 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-config-data" (OuterVolumeSpecName: "config-data") pod "54194cca-39d2-42a4-9bc6-dc35f2e763fe" (UID: "54194cca-39d2-42a4-9bc6-dc35f2e763fe"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.781893 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8052dc48-751a-41d4-9af8-6cdc16beb21d" (UID: "8052dc48-751a-41d4-9af8-6cdc16beb21d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.786558 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-config-data" (OuterVolumeSpecName: "config-data") pod "8052dc48-751a-41d4-9af8-6cdc16beb21d" (UID: "8052dc48-751a-41d4-9af8-6cdc16beb21d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.792392 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "54194cca-39d2-42a4-9bc6-dc35f2e763fe" (UID: "54194cca-39d2-42a4-9bc6-dc35f2e763fe"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.806119 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "54194cca-39d2-42a4-9bc6-dc35f2e763fe" (UID: "54194cca-39d2-42a4-9bc6-dc35f2e763fe"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.822115 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "8052dc48-751a-41d4-9af8-6cdc16beb21d" (UID: "8052dc48-751a-41d4-9af8-6cdc16beb21d"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.853227 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sr7l\" (UniqueName: \"kubernetes.io/projected/54194cca-39d2-42a4-9bc6-dc35f2e763fe-kube-api-access-9sr7l\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.853265 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.853280 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.853290 4690 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.853300 4690 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.853310 4690 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-config-data\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.853320 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nffsp\" (UniqueName: \"kubernetes.io/projected/8052dc48-751a-41d4-9af8-6cdc16beb21d-kube-api-access-nffsp\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.853329 4690 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/54194cca-39d2-42a4-9bc6-dc35f2e763fe-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.855170 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "8052dc48-751a-41d4-9af8-6cdc16beb21d" (UID: "8052dc48-751a-41d4-9af8-6cdc16beb21d"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Mar 20 13:46:12 crc kubenswrapper[4690]: I0320 13:46:12.955093 4690 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8052dc48-751a-41d4-9af8-6cdc16beb21d-public-tls-certs\") on node \"crc\" DevicePath \"\"" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.622506 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.622688 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.622450 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a20b9cc4-387a-4e80-935e-e94a582e3843","Type":"ContainerStarted","Data":"92fe0b3481791b856df8a266355a62a0d0091ed10de2650a7f558ce661fdf204"} Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.622796 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"a20b9cc4-387a-4e80-935e-e94a582e3843","Type":"ContainerStarted","Data":"e02873d4f652c86efb2312e1719972940a1786ca3b43605d11ab3f26a40a0dbb"} Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.657544 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.657525686 podStartE2EDuration="2.657525686s" podCreationTimestamp="2026-03-20 13:46:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:46:13.653327716 +0000 UTC m=+1419.942927669" watchObservedRunningTime="2026-03-20 13:46:13.657525686 +0000 UTC m=+1419.947125629" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.680972 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.698385 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.721408 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.741271 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.754668 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:46:13 crc kubenswrapper[4690]: E0320 13:46:13.755168 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-log" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.755187 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-log" Mar 20 13:46:13 crc kubenswrapper[4690]: E0320 13:46:13.755208 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-api" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.755215 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-api" Mar 20 13:46:13 crc kubenswrapper[4690]: E0320 13:46:13.755224 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-log" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.755230 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-log" Mar 20 13:46:13 crc kubenswrapper[4690]: E0320 13:46:13.755245 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-metadata" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.755253 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" 
containerName="nova-metadata-metadata" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.755425 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-api" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.755444 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-log" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.755457 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" containerName="nova-metadata-metadata" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.755471 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" containerName="nova-api-log" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.756399 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.759017 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.759026 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.763000 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.777449 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.778787 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.780675 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.780806 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.780879 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.788552 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.874201 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-config-data\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.874275 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlzl2\" (UniqueName: \"kubernetes.io/projected/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-kube-api-access-hlzl2\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.874402 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.874812 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-logs\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.874915 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.976643 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-config-data\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.976694 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2zrhv\" (UniqueName: \"kubernetes.io/projected/8620ab8a-7126-4218-aea8-618f506ca17c-kube-api-access-2zrhv\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.976744 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.976986 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-config-data\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.977106 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-public-tls-certs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.977365 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlzl2\" (UniqueName: \"kubernetes.io/projected/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-kube-api-access-hlzl2\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.977460 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8620ab8a-7126-4218-aea8-618f506ca17c-logs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.977519 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.977654 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-logs\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.977733 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.977800 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.978695 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-logs\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.983718 4690 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.985076 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-config-data\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:13 crc kubenswrapper[4690]: I0320 13:46:13.988967 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.020889 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlzl2\" (UniqueName: \"kubernetes.io/projected/688c9fe8-f4e6-4e49-9e20-b1f3adbefa78-kube-api-access-hlzl2\") pod \"nova-metadata-0\" (UID: \"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78\") " pod="openstack/nova-metadata-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.080377 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8620ab8a-7126-4218-aea8-618f506ca17c-logs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.080679 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.080791 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-config-data\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.080835 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2zrhv\" (UniqueName: \"kubernetes.io/projected/8620ab8a-7126-4218-aea8-618f506ca17c-kube-api-access-2zrhv\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.080887 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.080923 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.080967 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-public-tls-certs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.081203 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8620ab8a-7126-4218-aea8-618f506ca17c-logs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.085311 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-public-tls-certs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.087150 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-config-data\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.095432 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-internal-tls-certs\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.095917 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8620ab8a-7126-4218-aea8-618f506ca17c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.101652 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2zrhv\" (UniqueName: \"kubernetes.io/projected/8620ab8a-7126-4218-aea8-618f506ca17c-kube-api-access-2zrhv\") pod \"nova-api-0\" (UID: \"8620ab8a-7126-4218-aea8-618f506ca17c\") " pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.394612 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.437652 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="54194cca-39d2-42a4-9bc6-dc35f2e763fe" path="/var/lib/kubelet/pods/54194cca-39d2-42a4-9bc6-dc35f2e763fe/volumes" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.438667 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8052dc48-751a-41d4-9af8-6cdc16beb21d" path="/var/lib/kubelet/pods/8052dc48-751a-41d4-9af8-6cdc16beb21d/volumes" Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.606048 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.636600 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78","Type":"ContainerStarted","Data":"9201d5fe8f10966ac0d83354a600ed6be0110abbe579ff9e59be6292c04bde60"} Mar 20 13:46:14 crc kubenswrapper[4690]: I0320 13:46:14.902215 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Mar 20 13:46:14 crc kubenswrapper[4690]: W0320 13:46:14.903790 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8620ab8a_7126_4218_aea8_618f506ca17c.slice/crio-2128a2079a1269b6fab1db47ee9b5000e12b00af2cb1a1b1b63d7fc60014ca15 WatchSource:0}: Error finding container 2128a2079a1269b6fab1db47ee9b5000e12b00af2cb1a1b1b63d7fc60014ca15: Status 404 returned error can't find the container with id 2128a2079a1269b6fab1db47ee9b5000e12b00af2cb1a1b1b63d7fc60014ca15 Mar 20 13:46:15 crc kubenswrapper[4690]: I0320 13:46:15.647753 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78","Type":"ContainerStarted","Data":"b08f1a01d4f4d818f85774291adf0421e14b777d1a687435d3a13bfe2684ee9f"} Mar 20 13:46:15 crc kubenswrapper[4690]: I0320 13:46:15.647803 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"688c9fe8-f4e6-4e49-9e20-b1f3adbefa78","Type":"ContainerStarted","Data":"9cde355258fe03c0d355660d65052f4bb77b6ce8c3b69714d818769b266130c3"} Mar 20 13:46:15 crc kubenswrapper[4690]: I0320 13:46:15.651686 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8620ab8a-7126-4218-aea8-618f506ca17c","Type":"ContainerStarted","Data":"f6af0073c63969ce3d2ce35064d020af7e1f5f74ca05e13054b8c110513540d9"} Mar 20 13:46:15 crc kubenswrapper[4690]: I0320 13:46:15.651722 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8620ab8a-7126-4218-aea8-618f506ca17c","Type":"ContainerStarted","Data":"9d337886f55ad4baa5c666e7b995b47807af5a551a37afaa9a939a8a038ddf30"} Mar 20 13:46:15 crc kubenswrapper[4690]: I0320 13:46:15.651737 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"8620ab8a-7126-4218-aea8-618f506ca17c","Type":"ContainerStarted","Data":"2128a2079a1269b6fab1db47ee9b5000e12b00af2cb1a1b1b63d7fc60014ca15"} Mar 20 13:46:15 crc kubenswrapper[4690]: I0320 13:46:15.700984 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.70096056 podStartE2EDuration="2.70096056s" podCreationTimestamp="2026-03-20 13:46:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 
00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:46:15.691362786 +0000 UTC m=+1421.980962739" watchObservedRunningTime="2026-03-20 13:46:15.70096056 +0000 UTC m=+1421.990560513" Mar 20 13:46:15 crc kubenswrapper[4690]: I0320 13:46:15.702727 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.70272114 podStartE2EDuration="2.70272114s" podCreationTimestamp="2026-03-20 13:46:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-03-20 13:46:15.669797521 +0000 UTC m=+1421.959397464" watchObservedRunningTime="2026-03-20 13:46:15.70272114 +0000 UTC m=+1421.992321093" Mar 20 13:46:16 crc kubenswrapper[4690]: I0320 13:46:16.992288 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Mar 20 13:46:21 crc kubenswrapper[4690]: I0320 13:46:21.993294 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Mar 20 13:46:22 crc kubenswrapper[4690]: I0320 13:46:22.047501 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Mar 20 13:46:22 crc kubenswrapper[4690]: I0320 13:46:22.767116 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Mar 20 13:46:24 crc kubenswrapper[4690]: I0320 13:46:24.081435 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Mar 20 13:46:24 crc kubenswrapper[4690]: I0320 13:46:24.081474 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Mar 20 13:46:24 crc kubenswrapper[4690]: I0320 13:46:24.395674 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Mar 20 13:46:24 crc kubenswrapper[4690]: I0320 13:46:24.396502 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Mar 20 13:46:25 crc kubenswrapper[4690]: I0320 13:46:25.100136 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="688c9fe8-f4e6-4e49-9e20-b1f3adbefa78" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Mar 20 13:46:25 crc kubenswrapper[4690]: I0320 13:46:25.100117 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="688c9fe8-f4e6-4e49-9e20-b1f3adbefa78" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.216:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Mar 20 13:46:25 crc kubenswrapper[4690]: I0320 13:46:25.412193 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8620ab8a-7126-4218-aea8-618f506ca17c" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.217:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Mar 20 13:46:25 crc kubenswrapper[4690]: I0320 13:46:25.412253 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="8620ab8a-7126-4218-aea8-618f506ca17c" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.217:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting 
headers)" Mar 20 13:46:26 crc kubenswrapper[4690]: I0320 13:46:26.800963 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Mar 20 13:46:32 crc kubenswrapper[4690]: I0320 13:46:32.081129 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Mar 20 13:46:32 crc kubenswrapper[4690]: I0320 13:46:32.081631 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Mar 20 13:46:32 crc kubenswrapper[4690]: I0320 13:46:32.395527 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Mar 20 13:46:32 crc kubenswrapper[4690]: I0320 13:46:32.395884 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Mar 20 13:46:34 crc kubenswrapper[4690]: I0320 13:46:34.089383 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Mar 20 13:46:34 crc kubenswrapper[4690]: I0320 13:46:34.094350 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Mar 20 13:46:34 crc kubenswrapper[4690]: I0320 13:46:34.096662 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Mar 20 13:46:34 crc kubenswrapper[4690]: I0320 13:46:34.410262 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Mar 20 13:46:34 crc kubenswrapper[4690]: I0320 13:46:34.412596 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Mar 20 13:46:34 crc kubenswrapper[4690]: I0320 13:46:34.435679 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Mar 20 13:46:34 crc kubenswrapper[4690]: I0320 13:46:34.893846 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Mar 20 13:46:34 crc kubenswrapper[4690]: I0320 13:46:34.895371 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Mar 20 13:46:52 crc kubenswrapper[4690]: I0320 13:46:52.896004 4690 scope.go:117] "RemoveContainer" containerID="c7ee29cec07ad217a06541d2db38c4e16b1b44ed4f6201aa4baa7c7d0229eded" Mar 20 13:47:07 crc kubenswrapper[4690]: I0320 13:47:07.965248 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-s4h8k"] Mar 20 13:47:07 crc kubenswrapper[4690]: I0320 13:47:07.967745 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:07 crc kubenswrapper[4690]: I0320 13:47:07.987698 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s4h8k"] Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.076836 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-catalog-content\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.076919 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t7g6\" (UniqueName: \"kubernetes.io/projected/339ef72b-a828-43e0-9734-485ab621a43e-kube-api-access-8t7g6\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.077230 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-utilities\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.179780 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-utilities\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.179985 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-catalog-content\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.180036 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t7g6\" (UniqueName: \"kubernetes.io/projected/339ef72b-a828-43e0-9734-485ab621a43e-kube-api-access-8t7g6\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.181168 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-utilities\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.181532 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-catalog-content\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.199423 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-8t7g6\" (UniqueName: \"kubernetes.io/projected/339ef72b-a828-43e0-9734-485ab621a43e-kube-api-access-8t7g6\") pod \"redhat-operators-s4h8k\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.291520 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:08 crc kubenswrapper[4690]: I0320 13:47:08.808906 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-s4h8k"] Mar 20 13:47:08 crc kubenswrapper[4690]: W0320 13:47:08.817491 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod339ef72b_a828_43e0_9734_485ab621a43e.slice/crio-a83ffe7f1aeea1ecd72341f11160b3e0ed46976c00e5302da8eb8c93f7cd2ac0 WatchSource:0}: Error finding container a83ffe7f1aeea1ecd72341f11160b3e0ed46976c00e5302da8eb8c93f7cd2ac0: Status 404 returned error can't find the container with id a83ffe7f1aeea1ecd72341f11160b3e0ed46976c00e5302da8eb8c93f7cd2ac0 Mar 20 13:47:09 crc kubenswrapper[4690]: I0320 13:47:09.256489 4690 generic.go:334] "Generic (PLEG): container finished" podID="339ef72b-a828-43e0-9734-485ab621a43e" containerID="ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3" exitCode=0 Mar 20 13:47:09 crc kubenswrapper[4690]: I0320 13:47:09.256541 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4h8k" event={"ID":"339ef72b-a828-43e0-9734-485ab621a43e","Type":"ContainerDied","Data":"ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3"} Mar 20 13:47:09 crc kubenswrapper[4690]: I0320 13:47:09.256573 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4h8k" event={"ID":"339ef72b-a828-43e0-9734-485ab621a43e","Type":"ContainerStarted","Data":"a83ffe7f1aeea1ecd72341f11160b3e0ed46976c00e5302da8eb8c93f7cd2ac0"} Mar 20 13:47:09 crc kubenswrapper[4690]: I0320 13:47:09.258368 4690 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 13:47:11 crc kubenswrapper[4690]: I0320 13:47:11.295967 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4h8k" event={"ID":"339ef72b-a828-43e0-9734-485ab621a43e","Type":"ContainerStarted","Data":"3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed"} Mar 20 13:47:12 crc kubenswrapper[4690]: I0320 13:47:12.323840 4690 generic.go:334] "Generic (PLEG): container finished" podID="339ef72b-a828-43e0-9734-485ab621a43e" containerID="3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed" exitCode=0 Mar 20 13:47:12 crc kubenswrapper[4690]: I0320 13:47:12.323957 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4h8k" event={"ID":"339ef72b-a828-43e0-9734-485ab621a43e","Type":"ContainerDied","Data":"3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed"} Mar 20 13:47:14 crc kubenswrapper[4690]: I0320 13:47:14.353452 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4h8k" event={"ID":"339ef72b-a828-43e0-9734-485ab621a43e","Type":"ContainerStarted","Data":"1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db"} Mar 20 13:47:14 crc kubenswrapper[4690]: I0320 13:47:14.385589 4690 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openshift-marketplace/redhat-operators-s4h8k" podStartSLOduration=3.3705191340000002 podStartE2EDuration="7.385562473s" podCreationTimestamp="2026-03-20 13:47:07 +0000 UTC" firstStartedPulling="2026-03-20 13:47:09.258081844 +0000 UTC m=+1475.547681787" lastFinishedPulling="2026-03-20 13:47:13.273125183 +0000 UTC m=+1479.562725126" observedRunningTime="2026-03-20 13:47:14.376880836 +0000 UTC m=+1480.666480819" watchObservedRunningTime="2026-03-20 13:47:14.385562473 +0000 UTC m=+1480.675162446" Mar 20 13:47:18 crc kubenswrapper[4690]: I0320 13:47:18.291762 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:18 crc kubenswrapper[4690]: I0320 13:47:18.292815 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:19 crc kubenswrapper[4690]: I0320 13:47:19.358683 4690 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-s4h8k" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="registry-server" probeResult="failure" output=< Mar 20 13:47:19 crc kubenswrapper[4690]: timeout: failed to connect service ":50051" within 1s Mar 20 13:47:19 crc kubenswrapper[4690]: > Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.058666 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-82724/must-gather-zvwq2"] Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.060679 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.067907 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-82724"/"openshift-service-ca.crt" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.068840 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-82724"/"default-dockercfg-nx8l2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.068982 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-82724"/"kube-root-ca.crt" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.096634 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-82724/must-gather-zvwq2"] Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.120605 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bb362278-db9c-48b1-94c8-6da00bd25d9e-must-gather-output\") pod \"must-gather-zvwq2\" (UID: \"bb362278-db9c-48b1-94c8-6da00bd25d9e\") " pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.120657 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ps6fw\" (UniqueName: \"kubernetes.io/projected/bb362278-db9c-48b1-94c8-6da00bd25d9e-kube-api-access-ps6fw\") pod \"must-gather-zvwq2\" (UID: \"bb362278-db9c-48b1-94c8-6da00bd25d9e\") " pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.222868 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bb362278-db9c-48b1-94c8-6da00bd25d9e-must-gather-output\") pod \"must-gather-zvwq2\" (UID: 
\"bb362278-db9c-48b1-94c8-6da00bd25d9e\") " pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.222933 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ps6fw\" (UniqueName: \"kubernetes.io/projected/bb362278-db9c-48b1-94c8-6da00bd25d9e-kube-api-access-ps6fw\") pod \"must-gather-zvwq2\" (UID: \"bb362278-db9c-48b1-94c8-6da00bd25d9e\") " pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.223384 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bb362278-db9c-48b1-94c8-6da00bd25d9e-must-gather-output\") pod \"must-gather-zvwq2\" (UID: \"bb362278-db9c-48b1-94c8-6da00bd25d9e\") " pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.239977 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ps6fw\" (UniqueName: \"kubernetes.io/projected/bb362278-db9c-48b1-94c8-6da00bd25d9e-kube-api-access-ps6fw\") pod \"must-gather-zvwq2\" (UID: \"bb362278-db9c-48b1-94c8-6da00bd25d9e\") " pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.387761 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:47:23 crc kubenswrapper[4690]: I0320 13:47:23.874942 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-82724/must-gather-zvwq2"] Mar 20 13:47:24 crc kubenswrapper[4690]: I0320 13:47:24.486670 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/must-gather-zvwq2" event={"ID":"bb362278-db9c-48b1-94c8-6da00bd25d9e","Type":"ContainerStarted","Data":"3968feb98706e50645ee284cfe20799f77a3f5d7874dca2170799611f77dfe60"} Mar 20 13:47:28 crc kubenswrapper[4690]: I0320 13:47:28.339206 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:28 crc kubenswrapper[4690]: I0320 13:47:28.390154 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:28 crc kubenswrapper[4690]: I0320 13:47:28.530704 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/must-gather-zvwq2" event={"ID":"bb362278-db9c-48b1-94c8-6da00bd25d9e","Type":"ContainerStarted","Data":"da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b"} Mar 20 13:47:28 crc kubenswrapper[4690]: I0320 13:47:28.530754 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/must-gather-zvwq2" event={"ID":"bb362278-db9c-48b1-94c8-6da00bd25d9e","Type":"ContainerStarted","Data":"6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f"} Mar 20 13:47:28 crc kubenswrapper[4690]: I0320 13:47:28.625770 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s4h8k"] Mar 20 13:47:28 crc kubenswrapper[4690]: I0320 13:47:28.631417 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-82724/must-gather-zvwq2" podStartSLOduration=1.8546554450000001 podStartE2EDuration="5.631396819s" podCreationTimestamp="2026-03-20 13:47:23 +0000 UTC" firstStartedPulling="2026-03-20 13:47:23.892870587 +0000 UTC m=+1490.182470530" 
lastFinishedPulling="2026-03-20 13:47:27.669611961 +0000 UTC m=+1493.959211904" observedRunningTime="2026-03-20 13:47:28.616442982 +0000 UTC m=+1494.906042945" watchObservedRunningTime="2026-03-20 13:47:28.631396819 +0000 UTC m=+1494.920996782" Mar 20 13:47:29 crc kubenswrapper[4690]: I0320 13:47:29.544239 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-s4h8k" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="registry-server" containerID="cri-o://1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db" gracePeriod=2 Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.013671 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.187904 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-catalog-content\") pod \"339ef72b-a828-43e0-9734-485ab621a43e\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.188001 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8t7g6\" (UniqueName: \"kubernetes.io/projected/339ef72b-a828-43e0-9734-485ab621a43e-kube-api-access-8t7g6\") pod \"339ef72b-a828-43e0-9734-485ab621a43e\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.188107 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-utilities\") pod \"339ef72b-a828-43e0-9734-485ab621a43e\" (UID: \"339ef72b-a828-43e0-9734-485ab621a43e\") " Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.188661 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-utilities" (OuterVolumeSpecName: "utilities") pod "339ef72b-a828-43e0-9734-485ab621a43e" (UID: "339ef72b-a828-43e0-9734-485ab621a43e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.197758 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/339ef72b-a828-43e0-9734-485ab621a43e-kube-api-access-8t7g6" (OuterVolumeSpecName: "kube-api-access-8t7g6") pod "339ef72b-a828-43e0-9734-485ab621a43e" (UID: "339ef72b-a828-43e0-9734-485ab621a43e"). InnerVolumeSpecName "kube-api-access-8t7g6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.290267 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8t7g6\" (UniqueName: \"kubernetes.io/projected/339ef72b-a828-43e0-9734-485ab621a43e-kube-api-access-8t7g6\") on node \"crc\" DevicePath \"\"" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.290294 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.324450 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "339ef72b-a828-43e0-9734-485ab621a43e" (UID: "339ef72b-a828-43e0-9734-485ab621a43e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.391912 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/339ef72b-a828-43e0-9734-485ab621a43e-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.557736 4690 generic.go:334] "Generic (PLEG): container finished" podID="339ef72b-a828-43e0-9734-485ab621a43e" containerID="1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db" exitCode=0 Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.557788 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4h8k" event={"ID":"339ef72b-a828-43e0-9734-485ab621a43e","Type":"ContainerDied","Data":"1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db"} Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.557823 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-s4h8k" event={"ID":"339ef72b-a828-43e0-9734-485ab621a43e","Type":"ContainerDied","Data":"a83ffe7f1aeea1ecd72341f11160b3e0ed46976c00e5302da8eb8c93f7cd2ac0"} Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.557877 4690 scope.go:117] "RemoveContainer" containerID="1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.557974 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-s4h8k" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.600978 4690 scope.go:117] "RemoveContainer" containerID="3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.601138 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-s4h8k"] Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.615756 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-s4h8k"] Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.635003 4690 scope.go:117] "RemoveContainer" containerID="ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.674729 4690 scope.go:117] "RemoveContainer" containerID="1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db" Mar 20 13:47:30 crc kubenswrapper[4690]: E0320 13:47:30.675225 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db\": container with ID starting with 1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db not found: ID does not exist" containerID="1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.675278 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db"} err="failed to get container status \"1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db\": rpc error: code = NotFound desc = could not find container \"1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db\": container with ID starting with 1b3b24d2693618aab590dd98a1cb9b7bf81926be77162064117df6358009d1db not found: ID does not exist" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.675312 4690 scope.go:117] "RemoveContainer" containerID="3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed" Mar 20 13:47:30 crc kubenswrapper[4690]: E0320 13:47:30.675812 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed\": container with ID starting with 3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed not found: ID does not exist" containerID="3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.675990 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed"} err="failed to get container status \"3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed\": rpc error: code = NotFound desc = could not find container \"3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed\": container with ID starting with 3760203677b3beee747a036697f4310d9b392ec8e44c10a428d07f6c07c434ed not found: ID does not exist" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.676099 4690 scope.go:117] "RemoveContainer" containerID="ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3" Mar 20 13:47:30 crc kubenswrapper[4690]: E0320 13:47:30.676555 4690 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3\": container with ID starting with ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3 not found: ID does not exist" containerID="ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3" Mar 20 13:47:30 crc kubenswrapper[4690]: I0320 13:47:30.676597 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3"} err="failed to get container status \"ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3\": rpc error: code = NotFound desc = could not find container \"ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3\": container with ID starting with ff9aa7fa28822a6605f62c8c81f130e10df255666a284d22b1d0f4557b8933c3 not found: ID does not exist" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.424780 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="339ef72b-a828-43e0-9734-485ab621a43e" path="/var/lib/kubelet/pods/339ef72b-a828-43e0-9734-485ab621a43e/volumes" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.840538 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-82724/crc-debug-92qhv"] Mar 20 13:47:32 crc kubenswrapper[4690]: E0320 13:47:32.841343 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="extract-utilities" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.841365 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="extract-utilities" Mar 20 13:47:32 crc kubenswrapper[4690]: E0320 13:47:32.841376 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="registry-server" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.841384 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="registry-server" Mar 20 13:47:32 crc kubenswrapper[4690]: E0320 13:47:32.841429 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="extract-content" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.841436 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="extract-content" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.841644 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="339ef72b-a828-43e0-9734-485ab621a43e" containerName="registry-server" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.842381 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.937585 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vwxn\" (UniqueName: \"kubernetes.io/projected/10045e3d-2085-4a20-8fdd-c2542a889171-kube-api-access-5vwxn\") pod \"crc-debug-92qhv\" (UID: \"10045e3d-2085-4a20-8fdd-c2542a889171\") " pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:47:32 crc kubenswrapper[4690]: I0320 13:47:32.937647 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10045e3d-2085-4a20-8fdd-c2542a889171-host\") pod \"crc-debug-92qhv\" (UID: \"10045e3d-2085-4a20-8fdd-c2542a889171\") " pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:47:33 crc kubenswrapper[4690]: I0320 13:47:33.052097 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vwxn\" (UniqueName: \"kubernetes.io/projected/10045e3d-2085-4a20-8fdd-c2542a889171-kube-api-access-5vwxn\") pod \"crc-debug-92qhv\" (UID: \"10045e3d-2085-4a20-8fdd-c2542a889171\") " pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:47:33 crc kubenswrapper[4690]: I0320 13:47:33.052156 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10045e3d-2085-4a20-8fdd-c2542a889171-host\") pod \"crc-debug-92qhv\" (UID: \"10045e3d-2085-4a20-8fdd-c2542a889171\") " pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:47:33 crc kubenswrapper[4690]: I0320 13:47:33.052409 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10045e3d-2085-4a20-8fdd-c2542a889171-host\") pod \"crc-debug-92qhv\" (UID: \"10045e3d-2085-4a20-8fdd-c2542a889171\") " pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:47:33 crc kubenswrapper[4690]: I0320 13:47:33.098365 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vwxn\" (UniqueName: \"kubernetes.io/projected/10045e3d-2085-4a20-8fdd-c2542a889171-kube-api-access-5vwxn\") pod \"crc-debug-92qhv\" (UID: \"10045e3d-2085-4a20-8fdd-c2542a889171\") " pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:47:33 crc kubenswrapper[4690]: I0320 13:47:33.165707 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:47:33 crc kubenswrapper[4690]: I0320 13:47:33.594508 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/crc-debug-92qhv" event={"ID":"10045e3d-2085-4a20-8fdd-c2542a889171","Type":"ContainerStarted","Data":"b18bce3f154a42ac806b24a68a278c3090597ebc06c4369982785031d64c571c"} Mar 20 13:47:33 crc kubenswrapper[4690]: I0320 13:47:33.829604 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:47:33 crc kubenswrapper[4690]: I0320 13:47:33.829662 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:47:44 crc kubenswrapper[4690]: I0320 13:47:44.694592 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/crc-debug-92qhv" event={"ID":"10045e3d-2085-4a20-8fdd-c2542a889171","Type":"ContainerStarted","Data":"418043c73aa80775deb228b17b1b94ea5f7409fbf50499f0fd6c27dc8ddeef46"} Mar 20 13:47:44 crc kubenswrapper[4690]: I0320 13:47:44.714873 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-82724/crc-debug-92qhv" podStartSLOduration=2.280556121 podStartE2EDuration="12.714841445s" podCreationTimestamp="2026-03-20 13:47:32 +0000 UTC" firstStartedPulling="2026-03-20 13:47:33.219647644 +0000 UTC m=+1499.509247587" lastFinishedPulling="2026-03-20 13:47:43.653932968 +0000 UTC m=+1509.943532911" observedRunningTime="2026-03-20 13:47:44.708500144 +0000 UTC m=+1510.998100087" watchObservedRunningTime="2026-03-20 13:47:44.714841445 +0000 UTC m=+1511.004441388" Mar 20 13:47:59 crc kubenswrapper[4690]: I0320 13:47:59.839449 4690 generic.go:334] "Generic (PLEG): container finished" podID="10045e3d-2085-4a20-8fdd-c2542a889171" containerID="418043c73aa80775deb228b17b1b94ea5f7409fbf50499f0fd6c27dc8ddeef46" exitCode=0 Mar 20 13:47:59 crc kubenswrapper[4690]: I0320 13:47:59.839522 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/crc-debug-92qhv" event={"ID":"10045e3d-2085-4a20-8fdd-c2542a889171","Type":"ContainerDied","Data":"418043c73aa80775deb228b17b1b94ea5f7409fbf50499f0fd6c27dc8ddeef46"} Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.159519 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566908-5qpcd"] Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.163453 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566908-5qpcd" Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.172885 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566908-5qpcd"] Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.179552 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.179934 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.180208 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.255159 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvgxz\" (UniqueName: \"kubernetes.io/projected/cda1a881-a0aa-4c7a-bfff-a3e22aeb8919-kube-api-access-xvgxz\") pod \"auto-csr-approver-29566908-5qpcd\" (UID: \"cda1a881-a0aa-4c7a-bfff-a3e22aeb8919\") " pod="openshift-infra/auto-csr-approver-29566908-5qpcd" Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.357234 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvgxz\" (UniqueName: \"kubernetes.io/projected/cda1a881-a0aa-4c7a-bfff-a3e22aeb8919-kube-api-access-xvgxz\") pod \"auto-csr-approver-29566908-5qpcd\" (UID: \"cda1a881-a0aa-4c7a-bfff-a3e22aeb8919\") " pod="openshift-infra/auto-csr-approver-29566908-5qpcd" Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.376924 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvgxz\" (UniqueName: \"kubernetes.io/projected/cda1a881-a0aa-4c7a-bfff-a3e22aeb8919-kube-api-access-xvgxz\") pod \"auto-csr-approver-29566908-5qpcd\" (UID: \"cda1a881-a0aa-4c7a-bfff-a3e22aeb8919\") " pod="openshift-infra/auto-csr-approver-29566908-5qpcd" Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.499183 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566908-5qpcd" Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.963258 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566908-5qpcd"] Mar 20 13:48:00 crc kubenswrapper[4690]: I0320 13:48:00.979035 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.010384 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-82724/crc-debug-92qhv"] Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.018571 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-82724/crc-debug-92qhv"] Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.073309 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10045e3d-2085-4a20-8fdd-c2542a889171-host\") pod \"10045e3d-2085-4a20-8fdd-c2542a889171\" (UID: \"10045e3d-2085-4a20-8fdd-c2542a889171\") " Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.073432 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vwxn\" (UniqueName: \"kubernetes.io/projected/10045e3d-2085-4a20-8fdd-c2542a889171-kube-api-access-5vwxn\") pod \"10045e3d-2085-4a20-8fdd-c2542a889171\" (UID: \"10045e3d-2085-4a20-8fdd-c2542a889171\") " Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.073736 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/10045e3d-2085-4a20-8fdd-c2542a889171-host" (OuterVolumeSpecName: "host") pod "10045e3d-2085-4a20-8fdd-c2542a889171" (UID: "10045e3d-2085-4a20-8fdd-c2542a889171"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.074344 4690 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/10045e3d-2085-4a20-8fdd-c2542a889171-host\") on node \"crc\" DevicePath \"\"" Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.080977 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10045e3d-2085-4a20-8fdd-c2542a889171-kube-api-access-5vwxn" (OuterVolumeSpecName: "kube-api-access-5vwxn") pod "10045e3d-2085-4a20-8fdd-c2542a889171" (UID: "10045e3d-2085-4a20-8fdd-c2542a889171"). InnerVolumeSpecName "kube-api-access-5vwxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.175860 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vwxn\" (UniqueName: \"kubernetes.io/projected/10045e3d-2085-4a20-8fdd-c2542a889171-kube-api-access-5vwxn\") on node \"crc\" DevicePath \"\"" Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.861489 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b18bce3f154a42ac806b24a68a278c3090597ebc06c4369982785031d64c571c" Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.861623 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-82724/crc-debug-92qhv" Mar 20 13:48:01 crc kubenswrapper[4690]: I0320 13:48:01.874970 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566908-5qpcd" event={"ID":"cda1a881-a0aa-4c7a-bfff-a3e22aeb8919","Type":"ContainerStarted","Data":"6320fcc637a537815cc91c5d20d328c0234832cce1c9c506e72323913d03be75"} Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.280369 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-82724/crc-debug-lc52n"] Mar 20 13:48:02 crc kubenswrapper[4690]: E0320 13:48:02.280814 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10045e3d-2085-4a20-8fdd-c2542a889171" containerName="container-00" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.280831 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="10045e3d-2085-4a20-8fdd-c2542a889171" containerName="container-00" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.281080 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="10045e3d-2085-4a20-8fdd-c2542a889171" containerName="container-00" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.281771 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.396018 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5e1f5d71-437c-48af-9121-b4779ffca1fa-host\") pod \"crc-debug-lc52n\" (UID: \"5e1f5d71-437c-48af-9121-b4779ffca1fa\") " pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.396384 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8vwr\" (UniqueName: \"kubernetes.io/projected/5e1f5d71-437c-48af-9121-b4779ffca1fa-kube-api-access-l8vwr\") pod \"crc-debug-lc52n\" (UID: \"5e1f5d71-437c-48af-9121-b4779ffca1fa\") " pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.425192 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10045e3d-2085-4a20-8fdd-c2542a889171" path="/var/lib/kubelet/pods/10045e3d-2085-4a20-8fdd-c2542a889171/volumes" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.498743 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5e1f5d71-437c-48af-9121-b4779ffca1fa-host\") pod \"crc-debug-lc52n\" (UID: \"5e1f5d71-437c-48af-9121-b4779ffca1fa\") " pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.498907 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8vwr\" (UniqueName: \"kubernetes.io/projected/5e1f5d71-437c-48af-9121-b4779ffca1fa-kube-api-access-l8vwr\") pod \"crc-debug-lc52n\" (UID: \"5e1f5d71-437c-48af-9121-b4779ffca1fa\") " pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.500167 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5e1f5d71-437c-48af-9121-b4779ffca1fa-host\") pod \"crc-debug-lc52n\" (UID: \"5e1f5d71-437c-48af-9121-b4779ffca1fa\") " pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 
13:48:02.525578 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8vwr\" (UniqueName: \"kubernetes.io/projected/5e1f5d71-437c-48af-9121-b4779ffca1fa-kube-api-access-l8vwr\") pod \"crc-debug-lc52n\" (UID: \"5e1f5d71-437c-48af-9121-b4779ffca1fa\") " pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.597137 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.884528 4690 generic.go:334] "Generic (PLEG): container finished" podID="cda1a881-a0aa-4c7a-bfff-a3e22aeb8919" containerID="96e277a9a8ed9b1d9afe8ddfcc42cd0e56fcfe2f4e1645e186d93eaa68185fde" exitCode=0 Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.884569 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566908-5qpcd" event={"ID":"cda1a881-a0aa-4c7a-bfff-a3e22aeb8919","Type":"ContainerDied","Data":"96e277a9a8ed9b1d9afe8ddfcc42cd0e56fcfe2f4e1645e186d93eaa68185fde"} Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.886427 4690 generic.go:334] "Generic (PLEG): container finished" podID="5e1f5d71-437c-48af-9121-b4779ffca1fa" containerID="f715b10fb1f9f439f27e0e8240456fe2f4e62ac6239bcfe19ecc81b77e42083b" exitCode=1 Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.886543 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/crc-debug-lc52n" event={"ID":"5e1f5d71-437c-48af-9121-b4779ffca1fa","Type":"ContainerDied","Data":"f715b10fb1f9f439f27e0e8240456fe2f4e62ac6239bcfe19ecc81b77e42083b"} Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.886594 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/crc-debug-lc52n" event={"ID":"5e1f5d71-437c-48af-9121-b4779ffca1fa","Type":"ContainerStarted","Data":"195022d76868405abadbaa0212910513e5596400a7ba0bcea225a317fa707ea2"} Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.940927 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-82724/crc-debug-lc52n"] Mar 20 13:48:02 crc kubenswrapper[4690]: I0320 13:48:02.953948 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-82724/crc-debug-lc52n"] Mar 20 13:48:03 crc kubenswrapper[4690]: I0320 13:48:03.831745 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:48:03 crc kubenswrapper[4690]: I0320 13:48:03.832265 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.027449 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.130039 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l8vwr\" (UniqueName: \"kubernetes.io/projected/5e1f5d71-437c-48af-9121-b4779ffca1fa-kube-api-access-l8vwr\") pod \"5e1f5d71-437c-48af-9121-b4779ffca1fa\" (UID: \"5e1f5d71-437c-48af-9121-b4779ffca1fa\") " Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.130307 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5e1f5d71-437c-48af-9121-b4779ffca1fa-host\") pod \"5e1f5d71-437c-48af-9121-b4779ffca1fa\" (UID: \"5e1f5d71-437c-48af-9121-b4779ffca1fa\") " Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.130975 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5e1f5d71-437c-48af-9121-b4779ffca1fa-host" (OuterVolumeSpecName: "host") pod "5e1f5d71-437c-48af-9121-b4779ffca1fa" (UID: "5e1f5d71-437c-48af-9121-b4779ffca1fa"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.159106 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e1f5d71-437c-48af-9121-b4779ffca1fa-kube-api-access-l8vwr" (OuterVolumeSpecName: "kube-api-access-l8vwr") pod "5e1f5d71-437c-48af-9121-b4779ffca1fa" (UID: "5e1f5d71-437c-48af-9121-b4779ffca1fa"). InnerVolumeSpecName "kube-api-access-l8vwr". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.232060 4690 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5e1f5d71-437c-48af-9121-b4779ffca1fa-host\") on node \"crc\" DevicePath \"\"" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.232096 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l8vwr\" (UniqueName: \"kubernetes.io/projected/5e1f5d71-437c-48af-9121-b4779ffca1fa-kube-api-access-l8vwr\") on node \"crc\" DevicePath \"\"" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.308931 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566908-5qpcd" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.333027 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xvgxz\" (UniqueName: \"kubernetes.io/projected/cda1a881-a0aa-4c7a-bfff-a3e22aeb8919-kube-api-access-xvgxz\") pod \"cda1a881-a0aa-4c7a-bfff-a3e22aeb8919\" (UID: \"cda1a881-a0aa-4c7a-bfff-a3e22aeb8919\") " Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.337062 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cda1a881-a0aa-4c7a-bfff-a3e22aeb8919-kube-api-access-xvgxz" (OuterVolumeSpecName: "kube-api-access-xvgxz") pod "cda1a881-a0aa-4c7a-bfff-a3e22aeb8919" (UID: "cda1a881-a0aa-4c7a-bfff-a3e22aeb8919"). InnerVolumeSpecName "kube-api-access-xvgxz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.427359 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e1f5d71-437c-48af-9121-b4779ffca1fa" path="/var/lib/kubelet/pods/5e1f5d71-437c-48af-9121-b4779ffca1fa/volumes" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.435187 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xvgxz\" (UniqueName: \"kubernetes.io/projected/cda1a881-a0aa-4c7a-bfff-a3e22aeb8919-kube-api-access-xvgxz\") on node \"crc\" DevicePath \"\"" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.953434 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566908-5qpcd" event={"ID":"cda1a881-a0aa-4c7a-bfff-a3e22aeb8919","Type":"ContainerDied","Data":"6320fcc637a537815cc91c5d20d328c0234832cce1c9c506e72323913d03be75"} Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.953887 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6320fcc637a537815cc91c5d20d328c0234832cce1c9c506e72323913d03be75" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.953517 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566908-5qpcd" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.960729 4690 scope.go:117] "RemoveContainer" containerID="f715b10fb1f9f439f27e0e8240456fe2f4e62ac6239bcfe19ecc81b77e42083b" Mar 20 13:48:04 crc kubenswrapper[4690]: I0320 13:48:04.961322 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-82724/crc-debug-lc52n" Mar 20 13:48:05 crc kubenswrapper[4690]: I0320 13:48:05.375952 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566902-9qjq2"] Mar 20 13:48:05 crc kubenswrapper[4690]: I0320 13:48:05.383462 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566902-9qjq2"] Mar 20 13:48:06 crc kubenswrapper[4690]: I0320 13:48:06.426210 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="19081a20-821d-49bd-abd4-7788cab48b2d" path="/var/lib/kubelet/pods/19081a20-821d-49bd-abd4-7788cab48b2d/volumes" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.607969 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kjppm"] Mar 20 13:48:26 crc kubenswrapper[4690]: E0320 13:48:26.609871 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e1f5d71-437c-48af-9121-b4779ffca1fa" containerName="container-00" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.609903 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e1f5d71-437c-48af-9121-b4779ffca1fa" containerName="container-00" Mar 20 13:48:26 crc kubenswrapper[4690]: E0320 13:48:26.609931 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cda1a881-a0aa-4c7a-bfff-a3e22aeb8919" containerName="oc" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.609940 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="cda1a881-a0aa-4c7a-bfff-a3e22aeb8919" containerName="oc" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.610179 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e1f5d71-437c-48af-9121-b4779ffca1fa" containerName="container-00" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.610213 4690 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="cda1a881-a0aa-4c7a-bfff-a3e22aeb8919" containerName="oc" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.611872 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.617834 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kjppm"] Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.665493 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-utilities\") pod \"community-operators-kjppm\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.665546 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-catalog-content\") pod \"community-operators-kjppm\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.665571 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-srfdl\" (UniqueName: \"kubernetes.io/projected/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-kube-api-access-srfdl\") pod \"community-operators-kjppm\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.766552 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-catalog-content\") pod \"community-operators-kjppm\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.766602 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-srfdl\" (UniqueName: \"kubernetes.io/projected/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-kube-api-access-srfdl\") pod \"community-operators-kjppm\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.766733 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-utilities\") pod \"community-operators-kjppm\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.767138 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-catalog-content\") pod \"community-operators-kjppm\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.767381 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-utilities\") pod \"community-operators-kjppm\" (UID: 
\"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.797040 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-srfdl\" (UniqueName: \"kubernetes.io/projected/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-kube-api-access-srfdl\") pod \"community-operators-kjppm\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:26 crc kubenswrapper[4690]: I0320 13:48:26.934017 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:27 crc kubenswrapper[4690]: I0320 13:48:27.437097 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kjppm"] Mar 20 13:48:28 crc kubenswrapper[4690]: I0320 13:48:28.183716 4690 generic.go:334] "Generic (PLEG): container finished" podID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerID="f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5" exitCode=0 Mar 20 13:48:28 crc kubenswrapper[4690]: I0320 13:48:28.183772 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjppm" event={"ID":"4df31adf-8c2a-46e9-9162-a795a8ff5d8a","Type":"ContainerDied","Data":"f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5"} Mar 20 13:48:28 crc kubenswrapper[4690]: I0320 13:48:28.183987 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjppm" event={"ID":"4df31adf-8c2a-46e9-9162-a795a8ff5d8a","Type":"ContainerStarted","Data":"a2be2efca155ca4a8ee32021696eae9c2b128e672768b358e08c08f34de9a9ef"} Mar 20 13:48:30 crc kubenswrapper[4690]: I0320 13:48:30.958681 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjppm" event={"ID":"4df31adf-8c2a-46e9-9162-a795a8ff5d8a","Type":"ContainerStarted","Data":"4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5"} Mar 20 13:48:32 crc kubenswrapper[4690]: I0320 13:48:32.981180 4690 generic.go:334] "Generic (PLEG): container finished" podID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerID="4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5" exitCode=0 Mar 20 13:48:32 crc kubenswrapper[4690]: I0320 13:48:32.981293 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjppm" event={"ID":"4df31adf-8c2a-46e9-9162-a795a8ff5d8a","Type":"ContainerDied","Data":"4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5"} Mar 20 13:48:33 crc kubenswrapper[4690]: I0320 13:48:33.829188 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:48:33 crc kubenswrapper[4690]: I0320 13:48:33.829641 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:48:33 crc kubenswrapper[4690]: I0320 13:48:33.829687 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" 
pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:48:33 crc kubenswrapper[4690]: I0320 13:48:33.830473 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5e696252e251066c1296443f70dfdb2d4815582a27f8d0fb1a1a2dd90457b26f"} pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 13:48:33 crc kubenswrapper[4690]: I0320 13:48:33.830547 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" containerID="cri-o://5e696252e251066c1296443f70dfdb2d4815582a27f8d0fb1a1a2dd90457b26f" gracePeriod=600 Mar 20 13:48:34 crc kubenswrapper[4690]: I0320 13:48:34.005984 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjppm" event={"ID":"4df31adf-8c2a-46e9-9162-a795a8ff5d8a","Type":"ContainerStarted","Data":"7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f"} Mar 20 13:48:34 crc kubenswrapper[4690]: I0320 13:48:34.010489 4690 generic.go:334] "Generic (PLEG): container finished" podID="60ded650-b298-4115-8286-8969b94d4062" containerID="5e696252e251066c1296443f70dfdb2d4815582a27f8d0fb1a1a2dd90457b26f" exitCode=0 Mar 20 13:48:34 crc kubenswrapper[4690]: I0320 13:48:34.010530 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerDied","Data":"5e696252e251066c1296443f70dfdb2d4815582a27f8d0fb1a1a2dd90457b26f"} Mar 20 13:48:34 crc kubenswrapper[4690]: I0320 13:48:34.010560 4690 scope.go:117] "RemoveContainer" containerID="61fd0b68cc3ec6d77f02280694bc855224d1387694bd1b6f59471ac008b5cb66" Mar 20 13:48:34 crc kubenswrapper[4690]: I0320 13:48:34.035607 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kjppm" podStartSLOduration=2.686351187 podStartE2EDuration="8.035582105s" podCreationTimestamp="2026-03-20 13:48:26 +0000 UTC" firstStartedPulling="2026-03-20 13:48:28.186641866 +0000 UTC m=+1554.476241809" lastFinishedPulling="2026-03-20 13:48:33.535872774 +0000 UTC m=+1559.825472727" observedRunningTime="2026-03-20 13:48:34.028683168 +0000 UTC m=+1560.318283121" watchObservedRunningTime="2026-03-20 13:48:34.035582105 +0000 UTC m=+1560.325182058" Mar 20 13:48:35 crc kubenswrapper[4690]: I0320 13:48:35.022771 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651"} Mar 20 13:48:36 crc kubenswrapper[4690]: I0320 13:48:36.934432 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:36 crc kubenswrapper[4690]: I0320 13:48:36.934947 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:37 crc kubenswrapper[4690]: I0320 13:48:37.014680 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:37 crc 
kubenswrapper[4690]: I0320 13:48:37.714425 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-78ee-account-create-update-88b9m_02be9901-f882-45b3-8d1e-9105f2551417/mariadb-account-create-update/0.log" Mar 20 13:48:37 crc kubenswrapper[4690]: I0320 13:48:37.857926 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-74474d96d6-48nxq_1e178592-3eb7-4d02-8a14-08d18a96e289/barbican-api/0.log" Mar 20 13:48:37 crc kubenswrapper[4690]: I0320 13:48:37.928998 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-74474d96d6-48nxq_1e178592-3eb7-4d02-8a14-08d18a96e289/barbican-api-log/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.009245 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-db-create-fhxfq_0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa/mariadb-database-create/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.102754 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-db-sync-vgnp6_6629e615-4e98-4e99-b7dc-6990b379d93c/barbican-db-sync/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.186373 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6799cfc5db-x4fzm_73fc017b-172e-4785-850e-2146a070b915/barbican-keystone-listener/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.271078 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-6799cfc5db-x4fzm_73fc017b-172e-4785-850e-2146a070b915/barbican-keystone-listener-log/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.339817 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-854468899-d6c5x_381e146e-ca50-42cb-9e5b-e4e794c77d28/barbican-worker/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.355575 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-854468899-d6c5x_381e146e-ca50-42cb-9e5b-e4e794c77d28/barbican-worker-log/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.540106 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c121e50a-142e-42c2-b5a9-6d569d18176e/proxy-httpd/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.556150 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c121e50a-142e-42c2-b5a9-6d569d18176e/ceilometer-central-agent/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.572200 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c121e50a-142e-42c2-b5a9-6d569d18176e/ceilometer-notification-agent/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.763082 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_c121e50a-142e-42c2-b5a9-6d569d18176e/sg-core/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.801097 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-9a17-account-create-update-jcbrv_519b3e37-0f94-4018-97e2-7c7b0b99df0d/mariadb-account-create-update/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.922713 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_90a721fb-0ffe-4b4f-890c-97dca6ee9303/cinder-api/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.955036 4690 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_cinder-api-0_90a721fb-0ffe-4b4f-890c-97dca6ee9303/cinder-api-log/0.log" Mar 20 13:48:38 crc kubenswrapper[4690]: I0320 13:48:38.995001 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-db-create-hw7cv_94049a0c-7da4-43be-8e15-36e9a282f728/mariadb-database-create/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.134622 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-db-sync-6rgrr_3def27d2-bdda-4c07-b4b2-f695994bd509/cinder-db-sync/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.267069 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b820f2a9-930b-44d0-a2c3-ad73e87c4ebb/cinder-scheduler/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.290331 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_b820f2a9-930b-44d0-a2c3-ad73e87c4ebb/probe/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.394815 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cd5cbd7b9-qk7qx_91d61f03-56dd-4e92-a723-2cf8f6f018ca/init/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.558557 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cd5cbd7b9-qk7qx_91d61f03-56dd-4e92-a723-2cf8f6f018ca/init/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.620606 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-cd5cbd7b9-qk7qx_91d61f03-56dd-4e92-a723-2cf8f6f018ca/dnsmasq-dns/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.621149 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-245d-account-create-update-gsx28_57aa5abf-4617-4b31-8a02-2721982d912c/mariadb-account-create-update/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.785343 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-db-create-thbgv_f89adcb7-be07-48cd-8e10-0c8509a96029/mariadb-database-create/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.872019 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-db-sync-rf8w7_e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e/glance-db-sync/0.log" Mar 20 13:48:39 crc kubenswrapper[4690]: I0320 13:48:39.976362 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_8a099cdf-48ab-4e3a-9d46-88d38d63bdc4/glance-httpd/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.062215 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_8a099cdf-48ab-4e3a-9d46-88d38d63bdc4/glance-log/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.148066 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ae12c55b-fd78-4068-bce5-44f82d474701/glance-httpd/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.180565 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ae12c55b-fd78-4068-bce5-44f82d474701/glance-log/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.383221 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-7946cd7f64-rm6mr_ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596/horizon-log/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.389003 4690 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-7946cd7f64-rm6mr_ab6bbfdf-7171-4b1c-a7b8-9f956d1ad596/horizon/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.466888 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-008d-account-create-update-mw8ch_0a16960c-b84b-4b25-b51a-9f5dad54e473/mariadb-account-create-update/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.647043 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7b6fc496fc-2z4sr_8dbf4cf0-ef78-4bb9-b10e-765d8b4045fa/keystone-api/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.684008 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-bootstrap-42k9z_eea9b8e9-b9d0-49ca-ad22-aaf7450c1007/keystone-bootstrap/0.log" Mar 20 13:48:40 crc kubenswrapper[4690]: I0320 13:48:40.978787 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-create-2dbfj_c82e598b-5f84-4e68-aa8f-5682574fcae9/mariadb-database-create/0.log" Mar 20 13:48:41 crc kubenswrapper[4690]: I0320 13:48:41.013706 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-db-sync-r8lpw_75808517-3db4-41a1-ac99-99324152c26d/keystone-db-sync/0.log" Mar 20 13:48:41 crc kubenswrapper[4690]: I0320 13:48:41.141250 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_c60cea38-9cd8-4c38-94d4-3eefa840b455/kube-state-metrics/0.log" Mar 20 13:48:41 crc kubenswrapper[4690]: I0320 13:48:41.389602 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-64dbdc6bf-bqlx5_ece8bb9b-d177-45ba-8888-0c3df7c38bb8/neutron-api/0.log" Mar 20 13:48:41 crc kubenswrapper[4690]: I0320 13:48:41.450697 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-64dbdc6bf-bqlx5_ece8bb9b-d177-45ba-8888-0c3df7c38bb8/neutron-httpd/0.log" Mar 20 13:48:41 crc kubenswrapper[4690]: I0320 13:48:41.671362 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-db-create-gblwq_49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9/mariadb-database-create/0.log" Mar 20 13:48:41 crc kubenswrapper[4690]: I0320 13:48:41.699465 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-be37-account-create-update-2w6zx_b4fcecc7-f191-472f-abcc-d886648e5ecc/mariadb-account-create-update/0.log" Mar 20 13:48:41 crc kubenswrapper[4690]: I0320 13:48:41.827718 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-db-sync-8h88w_badac960-83c0-4715-b125-0fdd44ae7315/neutron-db-sync/0.log" Mar 20 13:48:41 crc kubenswrapper[4690]: I0320 13:48:41.942888 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8620ab8a-7126-4218-aea8-618f506ca17c/nova-api-api/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 13:48:42.057930 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_8620ab8a-7126-4218-aea8-618f506ca17c/nova-api-log/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 13:48:42.147667 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-619b-account-create-update-vld5x_d7435c23-ad0c-484c-bc24-5cceb7e01ccc/mariadb-account-create-update/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 13:48:42.287067 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-db-create-x7tz6_a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b/mariadb-database-create/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 
13:48:42.375958 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-9b74-account-create-update-qrqkx_817d49d2-79e2-42f0-b503-bd6bf78f1459/mariadb-account-create-update/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 13:48:42.497878 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-cell-mapping-sb7x2_69168ed4-2cdf-4be8-8ae0-917d89a54670/nova-manage/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 13:48:42.686516 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-db-sync-fvfgz_8ecaabd8-5cb5-4e0f-b5c8-c73075e68880/nova-cell0-conductor-db-sync/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 13:48:42.700119 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_4d06de43-2f21-4b70-8a38-9d7dbf386ada/nova-cell0-conductor-conductor/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 13:48:42.859068 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-db-create-tqb96_1ffaa372-aeed-471d-b5ba-f7692e1daad8/mariadb-database-create/0.log" Mar 20 13:48:42 crc kubenswrapper[4690]: I0320 13:48:42.944838 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-1ed3-account-create-update-xqkdf_b58df85c-1bf7-41ba-9839-d74172783a24/mariadb-account-create-update/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.071728 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-cell-mapping-fmsn9_ce04bc8c-c482-4ff3-a0b5-303db0874640/nova-manage/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.269739 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_37380d51-16da-4dc4-a30e-e0759035a9f4/nova-cell1-conductor-conductor/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.291104 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-db-sync-pr965_f50d733d-5439-49fc-af1b-bb36c5b3c739/nova-cell1-conductor-db-sync/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.464001 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-db-create-p7w2k_7338a418-a221-409b-bafd-666e7cc66a8e/mariadb-database-create/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.546268 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_2c33f4d2-a43a-427f-bd35-86c011e752e6/nova-cell1-novncproxy-novncproxy/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.731841 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_688c9fe8-f4e6-4e49-9e20-b1f3adbefa78/nova-metadata-metadata/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.749165 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_688c9fe8-f4e6-4e49-9e20-b1f3adbefa78/nova-metadata-log/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.947025 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_a81f6ca1-a67e-4cbc-99de-32701eccb13b/mysql-bootstrap/0.log" Mar 20 13:48:43 crc kubenswrapper[4690]: I0320 13:48:43.988616 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_a20b9cc4-387a-4e80-935e-e94a582e3843/nova-scheduler-scheduler/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.151809 4690 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openstack_openstack-cell1-galera-0_a81f6ca1-a67e-4cbc-99de-32701eccb13b/galera/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.168614 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_a81f6ca1-a67e-4cbc-99de-32701eccb13b/mysql-bootstrap/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.189117 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a4acc48-2e3c-4b76-b55e-e9152c405f11/mysql-bootstrap/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.398657 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a4acc48-2e3c-4b76-b55e-e9152c405f11/galera/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.437470 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_ac9d04ea-6675-4512-8957-0b4d67157b15/openstackclient/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.448335 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_5a4acc48-2e3c-4b76-b55e-e9152c405f11/mysql-bootstrap/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.615660 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-f2m7w_db48a1e3-3cbe-4b9c-b68f-92a011543076/openstack-network-exporter/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.731208 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-mxmrl_9e0061bd-d72c-4aeb-86f0-154e0cccfe15/ovn-controller/0.log" Mar 20 13:48:44 crc kubenswrapper[4690]: I0320 13:48:44.874884 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-s6fhs_ba9868f8-3baf-4ecd-896c-1497873f32d7/ovsdb-server-init/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.058906 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-s6fhs_ba9868f8-3baf-4ecd-896c-1497873f32d7/ovsdb-server-init/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.139393 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-s6fhs_ba9868f8-3baf-4ecd-896c-1497873f32d7/ovsdb-server/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.141726 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-s6fhs_ba9868f8-3baf-4ecd-896c-1497873f32d7/ovs-vswitchd/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.262425 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_edbd8493-2301-46b3-b4ba-b60511e31302/openstack-network-exporter/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.287751 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_edbd8493-2301-46b3-b4ba-b60511e31302/ovn-northd/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.351652 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_26e022b9-e7f5-4787-abe8-9967d8f4d11e/openstack-network-exporter/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.536835 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8/openstack-network-exporter/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.543074 4690 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-nb-0_26e022b9-e7f5-4787-abe8-9967d8f4d11e/ovsdbserver-nb/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.582645 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_c0b5e5a2-350a-4cd6-92aa-2039e7f48cc8/ovsdbserver-sb/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.759228 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-899c-account-create-update-pd277_cc9e0393-9cc0-4120-8661-31fc5e0a77f6/mariadb-account-create-update/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.837578 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b994f67f8-wh5fd_31689f30-a5bb-4542-be03-9ca2c6aac585/placement-api/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.924378 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-b994f67f8-wh5fd_31689f30-a5bb-4542-be03-9ca2c6aac585/placement-log/0.log" Mar 20 13:48:45 crc kubenswrapper[4690]: I0320 13:48:45.991225 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-db-create-27tlv_d1f9d0e8-10b6-4aa6-ae3a-890f9e521253/mariadb-database-create/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.137182 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-db-sync-bsz48_d6a06dc2-5128-47d4-a10a-e2ba196ec0c9/placement-db-sync/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.254574 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b6c3ab56-9d3c-431c-a697-d6df19b67a21/setup-container/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.372962 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b6c3ab56-9d3c-431c-a697-d6df19b67a21/setup-container/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.414857 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b6c3ab56-9d3c-431c-a697-d6df19b67a21/rabbitmq/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.490656 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c9508cc5-d6ca-435f-949a-790440ed5f11/setup-container/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.720748 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c9508cc5-d6ca-435f-949a-790440ed5f11/setup-container/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.751473 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_root-account-create-update-nxkjc_4ca2df3b-74d4-4d7f-907f-7893a816cc3a/mariadb-account-create-update/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.756344 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c9508cc5-d6ca-435f-949a-790440ed5f11/rabbitmq/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.951567 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-58c55dcc8c-ddx5k_636ad0c0-e301-4d1b-8ad6-e4094424024f/proxy-httpd/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.974975 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-58c55dcc8c-ddx5k_636ad0c0-e301-4d1b-8ad6-e4094424024f/proxy-server/0.log" Mar 20 13:48:46 crc kubenswrapper[4690]: I0320 13:48:46.995503 4690 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.066600 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kjppm"] Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.151076 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kjppm" podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerName="registry-server" containerID="cri-o://7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f" gracePeriod=2 Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.165475 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-57rps_0b26393a-1d00-4b21-a3b2-74518b7f0b3d/swift-ring-rebalance/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.194941 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/account-auditor/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.299818 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/account-reaper/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.417363 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/account-replicator/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.451133 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/account-server/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.509359 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/container-auditor/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.587244 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/container-replicator/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.592182 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.616649 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/container-server/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.650718 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/container-updater/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.693283 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-utilities\") pod \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.693519 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-catalog-content\") pod \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.693741 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-srfdl\" (UniqueName: \"kubernetes.io/projected/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-kube-api-access-srfdl\") pod \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\" (UID: \"4df31adf-8c2a-46e9-9162-a795a8ff5d8a\") " Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.694262 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-utilities" (OuterVolumeSpecName: "utilities") pod "4df31adf-8c2a-46e9-9162-a795a8ff5d8a" (UID: "4df31adf-8c2a-46e9-9162-a795a8ff5d8a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.714778 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-kube-api-access-srfdl" (OuterVolumeSpecName: "kube-api-access-srfdl") pod "4df31adf-8c2a-46e9-9162-a795a8ff5d8a" (UID: "4df31adf-8c2a-46e9-9162-a795a8ff5d8a"). InnerVolumeSpecName "kube-api-access-srfdl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.759816 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4df31adf-8c2a-46e9-9162-a795a8ff5d8a" (UID: "4df31adf-8c2a-46e9-9162-a795a8ff5d8a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.769519 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/object-expirer/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.780807 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/object-auditor/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.796471 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-srfdl\" (UniqueName: \"kubernetes.io/projected/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-kube-api-access-srfdl\") on node \"crc\" DevicePath \"\"" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.796500 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.796510 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4df31adf-8c2a-46e9-9162-a795a8ff5d8a-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.818140 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/object-replicator/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.867397 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/object-server/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.974452 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/object-updater/0.log" Mar 20 13:48:47 crc kubenswrapper[4690]: I0320 13:48:47.991915 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/swift-recon-cron/0.log" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.017512 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_3f8a5e46-bb4f-498c-ac43-d9cfb5f1945a/rsync/0.log" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.166662 4690 generic.go:334] "Generic (PLEG): container finished" podID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerID="7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f" exitCode=0 Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.166703 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjppm" event={"ID":"4df31adf-8c2a-46e9-9162-a795a8ff5d8a","Type":"ContainerDied","Data":"7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f"} Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.166731 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kjppm" event={"ID":"4df31adf-8c2a-46e9-9162-a795a8ff5d8a","Type":"ContainerDied","Data":"a2be2efca155ca4a8ee32021696eae9c2b128e672768b358e08c08f34de9a9ef"} Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.166749 4690 scope.go:117] "RemoveContainer" containerID="7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.166755 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kjppm" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.194295 4690 scope.go:117] "RemoveContainer" containerID="4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.204890 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kjppm"] Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.213612 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kjppm"] Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.223905 4690 scope.go:117] "RemoveContainer" containerID="f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.254497 4690 scope.go:117] "RemoveContainer" containerID="7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f" Mar 20 13:48:48 crc kubenswrapper[4690]: E0320 13:48:48.255045 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f\": container with ID starting with 7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f not found: ID does not exist" containerID="7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.255083 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f"} err="failed to get container status \"7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f\": rpc error: code = NotFound desc = could not find container \"7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f\": container with ID starting with 7de476b90cb6f62f81d93be95fa9bfb1d0239cee1fb68c80a45c584c1310a31f not found: ID does not exist" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.255110 4690 scope.go:117] "RemoveContainer" containerID="4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5" Mar 20 13:48:48 crc kubenswrapper[4690]: E0320 13:48:48.257097 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5\": container with ID starting with 4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5 not found: ID does not exist" containerID="4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.257127 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5"} err="failed to get container status \"4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5\": rpc error: code = NotFound desc = could not find container \"4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5\": container with ID starting with 4d782367908e879b6937af78f7bb3c7bee3d83fe0669d5f65bbed265c16a4ee5 not found: ID does not exist" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.257147 4690 scope.go:117] "RemoveContainer" containerID="f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5" Mar 20 13:48:48 crc kubenswrapper[4690]: E0320 13:48:48.257467 4690 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5\": container with ID starting with f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5 not found: ID does not exist" containerID="f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.257511 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5"} err="failed to get container status \"f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5\": rpc error: code = NotFound desc = could not find container \"f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5\": container with ID starting with f3d0936e3f45f5c3a86fdeb0d90fd45dcb2ee4483afe64d5ce1931df17f81fa5 not found: ID does not exist" Mar 20 13:48:48 crc kubenswrapper[4690]: I0320 13:48:48.429016 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" path="/var/lib/kubelet/pods/4df31adf-8c2a-46e9-9162-a795a8ff5d8a/volumes" Mar 20 13:48:49 crc kubenswrapper[4690]: I0320 13:48:49.054858 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_ce9abe13-14e4-4ce8-ae28-b52022d16a0e/memcached/0.log" Mar 20 13:48:53 crc kubenswrapper[4690]: I0320 13:48:53.093713 4690 scope.go:117] "RemoveContainer" containerID="477bd78e723b494d30bce3e15f338f131c639fe6b1fc266214f4c44a6c3857d4" Mar 20 13:48:53 crc kubenswrapper[4690]: I0320 13:48:53.119826 4690 scope.go:117] "RemoveContainer" containerID="e91cd3393898810db0bb699a1399eeabaa19cb53d3cb6758114ea9b999e302c2" Mar 20 13:48:53 crc kubenswrapper[4690]: I0320 13:48:53.192519 4690 scope.go:117] "RemoveContainer" containerID="3b636293b7cd0a98237276b9603db1f236f6378697c96d31f033c995c5aec2b6" Mar 20 13:48:53 crc kubenswrapper[4690]: I0320 13:48:53.221310 4690 scope.go:117] "RemoveContainer" containerID="46044e43cada2e61c44cca7fbbc48779ba086cc0491324f61348f1ca1ea54b4e" Mar 20 13:49:10 crc kubenswrapper[4690]: I0320 13:49:10.629484 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht_7ce1f851-f997-4617-a88f-36f2d7ca6f51/util/0.log" Mar 20 13:49:10 crc kubenswrapper[4690]: I0320 13:49:10.837412 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht_7ce1f851-f997-4617-a88f-36f2d7ca6f51/util/0.log" Mar 20 13:49:10 crc kubenswrapper[4690]: I0320 13:49:10.844801 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht_7ce1f851-f997-4617-a88f-36f2d7ca6f51/pull/0.log" Mar 20 13:49:10 crc kubenswrapper[4690]: I0320 13:49:10.845034 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht_7ce1f851-f997-4617-a88f-36f2d7ca6f51/pull/0.log" Mar 20 13:49:10 crc kubenswrapper[4690]: I0320 13:49:10.978971 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht_7ce1f851-f997-4617-a88f-36f2d7ca6f51/util/0.log" Mar 20 13:49:11 crc kubenswrapper[4690]: I0320 13:49:11.056086 4690 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht_7ce1f851-f997-4617-a88f-36f2d7ca6f51/extract/0.log" Mar 20 13:49:11 crc kubenswrapper[4690]: I0320 13:49:11.056792 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_1fdce41130f5b29849949eff9ffde1b21c32ee084e9de87dcb7c2c7c84zpjht_7ce1f851-f997-4617-a88f-36f2d7ca6f51/pull/0.log" Mar 20 13:49:11 crc kubenswrapper[4690]: I0320 13:49:11.216512 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-59bc569d95-6jwcd_f4df792a-6016-407b-8ff0-338ab8db08f7/manager/0.log" Mar 20 13:49:11 crc kubenswrapper[4690]: I0320 13:49:11.466355 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-588d4d986b-dls6p_af375454-db79-4671-9be9-14e7b5927452/manager/0.log" Mar 20 13:49:11 crc kubenswrapper[4690]: I0320 13:49:11.583605 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-79df6bcc97-dws7r_11b96fe2-4da9-41a1-b4e3-31f5e17d8ad6/manager/0.log" Mar 20 13:49:11 crc kubenswrapper[4690]: I0320 13:49:11.733694 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-67dd5f86f5-88wmc_a602a20e-98c2-4eef-8a20-a873a5f04b56/manager/0.log" Mar 20 13:49:11 crc kubenswrapper[4690]: I0320 13:49:11.871681 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-8464cc45fb-sbjcq_67bfb5a2-f27a-48d1-829d-67c998495611/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.105864 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-8d58dc466-kq824_509b5616-903e-4638-bcac-7db706a605fb/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.177240 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6f787dddc9-rxsjr_814ca78b-98a4-4e08-8c17-2ac1e45f3f70/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.263642 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-669fff9c7c-qqp5b_54581816-9413-47c6-889c-1ae815299b20/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.327263 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-768b96df4c-jfmgm_e6edfe21-12c6-4ca3-9992-c47b65455a25/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.454487 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-55f864c847-f2cbx_d30dfb8b-246a-461f-8230-4e12b67f8475/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.551426 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-67ccfc9778-f5pph_870fb6a4-04ea-4c9a-ae30-4acb7e4a050c/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.717700 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-767865f676-pt8zd_0b64d73c-d1f2-4823-b33c-f5b4e7cfa6a5/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.781454 4690 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_nova-operator-controller-manager-5d488d59fb-54czp_51eaf314-a93e-4736-b4ec-7de18291b971/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.894636 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-5b9f45d989-x5l86_15a47805-6294-4f70-ba77-d22857c579b9/manager/0.log" Mar 20 13:49:12 crc kubenswrapper[4690]: I0320 13:49:12.982244 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-86657c54f5bfk29_f13f1ec1-31f1-4492-876d-42ad21a46373/manager/0.log" Mar 20 13:49:13 crc kubenswrapper[4690]: I0320 13:49:13.194591 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-init-59b5998766-5npwc_749b59ec-5f9a-41b3-a48c-c5746c3d0b43/operator/0.log" Mar 20 13:49:13 crc kubenswrapper[4690]: I0320 13:49:13.366872 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-7rrdk_d762a7d0-5c65-4d5c-8e36-5d271d27f231/registry-server/0.log" Mar 20 13:49:13 crc kubenswrapper[4690]: I0320 13:49:13.542505 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-884679f54-5d2n4_ace63477-4c50-499f-958c-11135ab6a1a2/manager/0.log" Mar 20 13:49:13 crc kubenswrapper[4690]: I0320 13:49:13.685396 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-5784578c99-hhg2z_775748cf-df5a-466b-9c3f-057ca3ed36ab/manager/0.log" Mar 20 13:49:13 crc kubenswrapper[4690]: I0320 13:49:13.841719 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-c674c5965-dktfp_be5cbf2c-ee29-4c1a-9eca-50c8069886fd/manager/0.log" Mar 20 13:49:13 crc kubenswrapper[4690]: I0320 13:49:13.985747 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-85d5885774-zfkgt_bcc248ae-99f3-4554-8cfb-9bf5f72385ce/manager/0.log" Mar 20 13:49:14 crc kubenswrapper[4690]: I0320 13:49:14.009177 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-d6b694c5-9bx66_49cc1968-cbc1-432a-a952-dec062db3bd5/manager/0.log" Mar 20 13:49:14 crc kubenswrapper[4690]: I0320 13:49:14.115080 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5c5cb9c4d7-gvbnp_aa8d9ef3-cf4c-4083-bb69-579193795ffb/manager/0.log" Mar 20 13:49:14 crc kubenswrapper[4690]: I0320 13:49:14.227678 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-6c4d75f7f9-pfj42_a0d899fb-25be-4485-9d4d-77ca047c1524/manager/0.log" Mar 20 13:49:33 crc kubenswrapper[4690]: I0320 13:49:33.456797 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-vcp4n_19d07388-56ad-4bb6-bacb-2eec91c18aa8/control-plane-machine-set-operator/0.log" Mar 20 13:49:33 crc kubenswrapper[4690]: I0320 13:49:33.612191 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-8gsqv_27c73b63-ed31-4aae-bc66-5b4707f469f5/machine-api-operator/0.log" Mar 20 13:49:33 crc kubenswrapper[4690]: I0320 13:49:33.612252 4690 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-8gsqv_27c73b63-ed31-4aae-bc66-5b4707f469f5/kube-rbac-proxy/0.log" Mar 20 13:49:46 crc kubenswrapper[4690]: I0320 13:49:46.663738 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-858654f9db-snqb2_5e031456-428c-4966-8bca-2002a392ffb2/cert-manager-controller/0.log" Mar 20 13:49:46 crc kubenswrapper[4690]: I0320 13:49:46.818186 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-cf98fcc89-gwtjq_1ecae164-1e40-4b85-b047-9e1af1192ef6/cert-manager-cainjector/0.log" Mar 20 13:49:46 crc kubenswrapper[4690]: I0320 13:49:46.855528 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-687f57d79b-gs5zl_4b4f7f31-5780-4001-b9c7-7dade6cfea4d/cert-manager-webhook/0.log" Mar 20 13:49:53 crc kubenswrapper[4690]: I0320 13:49:53.351286 4690 scope.go:117] "RemoveContainer" containerID="4c83374abc3b88f5cc4def22c450cd92748c8254e73dc65fb53053773873ecb4" Mar 20 13:49:53 crc kubenswrapper[4690]: I0320 13:49:53.385669 4690 scope.go:117] "RemoveContainer" containerID="ea454ed32cf5432c0ce784a14841615e0b4764b96a40182ae7f47a05e4b4447a" Mar 20 13:49:53 crc kubenswrapper[4690]: I0320 13:49:53.427135 4690 scope.go:117] "RemoveContainer" containerID="e64f2bfa701432df86f7551d860275c933b7969c346bc788dacec4f122f93905" Mar 20 13:49:59 crc kubenswrapper[4690]: I0320 13:49:59.217706 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-86f58fcf4-8qxkm_96ef8c08-d917-4818-aa0b-a8f40e03d5af/nmstate-console-plugin/0.log" Mar 20 13:49:59 crc kubenswrapper[4690]: I0320 13:49:59.373950 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-fbbgx_f27e925b-b583-4ebb-9c76-8b94e717572b/nmstate-handler/0.log" Mar 20 13:49:59 crc kubenswrapper[4690]: I0320 13:49:59.412644 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-9b8c8685d-njk8k_7b110386-4970-4b99-ab60-783130776002/kube-rbac-proxy/0.log" Mar 20 13:49:59 crc kubenswrapper[4690]: I0320 13:49:59.451207 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-9b8c8685d-njk8k_7b110386-4970-4b99-ab60-783130776002/nmstate-metrics/0.log" Mar 20 13:49:59 crc kubenswrapper[4690]: I0320 13:49:59.571002 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-796d4cfff4-v4l8p_244f1908-ebf4-4923-afc9-30bab5acf6bd/nmstate-operator/0.log" Mar 20 13:49:59 crc kubenswrapper[4690]: I0320 13:49:59.659816 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f558f5558-gss2p_693ab8f6-3c6b-4aea-a488-6b9b17bcd249/nmstate-webhook/0.log" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.151099 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566910-mnbvl"] Mar 20 13:50:00 crc kubenswrapper[4690]: E0320 13:50:00.151612 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerName="registry-server" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.151635 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerName="registry-server" Mar 20 13:50:00 crc kubenswrapper[4690]: E0320 13:50:00.151656 4690 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerName="extract-content" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.151665 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerName="extract-content" Mar 20 13:50:00 crc kubenswrapper[4690]: E0320 13:50:00.151686 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerName="extract-utilities" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.151695 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerName="extract-utilities" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.152159 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="4df31adf-8c2a-46e9-9162-a795a8ff5d8a" containerName="registry-server" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.152962 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566910-mnbvl" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.154573 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.156022 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.156642 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.162681 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566910-mnbvl"] Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.236141 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n55kf\" (UniqueName: \"kubernetes.io/projected/15efb6c7-12eb-4214-a3da-83604ed317c1-kube-api-access-n55kf\") pod \"auto-csr-approver-29566910-mnbvl\" (UID: \"15efb6c7-12eb-4214-a3da-83604ed317c1\") " pod="openshift-infra/auto-csr-approver-29566910-mnbvl" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.338065 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n55kf\" (UniqueName: \"kubernetes.io/projected/15efb6c7-12eb-4214-a3da-83604ed317c1-kube-api-access-n55kf\") pod \"auto-csr-approver-29566910-mnbvl\" (UID: \"15efb6c7-12eb-4214-a3da-83604ed317c1\") " pod="openshift-infra/auto-csr-approver-29566910-mnbvl" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.359746 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n55kf\" (UniqueName: \"kubernetes.io/projected/15efb6c7-12eb-4214-a3da-83604ed317c1-kube-api-access-n55kf\") pod \"auto-csr-approver-29566910-mnbvl\" (UID: \"15efb6c7-12eb-4214-a3da-83604ed317c1\") " pod="openshift-infra/auto-csr-approver-29566910-mnbvl" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.472244 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566910-mnbvl" Mar 20 13:50:00 crc kubenswrapper[4690]: I0320 13:50:00.941783 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566910-mnbvl"] Mar 20 13:50:01 crc kubenswrapper[4690]: I0320 13:50:01.830507 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566910-mnbvl" event={"ID":"15efb6c7-12eb-4214-a3da-83604ed317c1","Type":"ContainerStarted","Data":"b178564f64e3a165776aa66e5cd5b9b679b243c06df7bd18888874d6fb5be098"} Mar 20 13:50:03 crc kubenswrapper[4690]: I0320 13:50:03.847227 4690 generic.go:334] "Generic (PLEG): container finished" podID="15efb6c7-12eb-4214-a3da-83604ed317c1" containerID="20ae55ebe3c5a3340627432d80cf0dfea2ab61caa9146b255dfd82709f600489" exitCode=0 Mar 20 13:50:03 crc kubenswrapper[4690]: I0320 13:50:03.847665 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566910-mnbvl" event={"ID":"15efb6c7-12eb-4214-a3da-83604ed317c1","Type":"ContainerDied","Data":"20ae55ebe3c5a3340627432d80cf0dfea2ab61caa9146b255dfd82709f600489"} Mar 20 13:50:05 crc kubenswrapper[4690]: I0320 13:50:05.248594 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566910-mnbvl" Mar 20 13:50:05 crc kubenswrapper[4690]: I0320 13:50:05.327003 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n55kf\" (UniqueName: \"kubernetes.io/projected/15efb6c7-12eb-4214-a3da-83604ed317c1-kube-api-access-n55kf\") pod \"15efb6c7-12eb-4214-a3da-83604ed317c1\" (UID: \"15efb6c7-12eb-4214-a3da-83604ed317c1\") " Mar 20 13:50:05 crc kubenswrapper[4690]: I0320 13:50:05.336262 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15efb6c7-12eb-4214-a3da-83604ed317c1-kube-api-access-n55kf" (OuterVolumeSpecName: "kube-api-access-n55kf") pod "15efb6c7-12eb-4214-a3da-83604ed317c1" (UID: "15efb6c7-12eb-4214-a3da-83604ed317c1"). InnerVolumeSpecName "kube-api-access-n55kf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:50:05 crc kubenswrapper[4690]: I0320 13:50:05.429140 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n55kf\" (UniqueName: \"kubernetes.io/projected/15efb6c7-12eb-4214-a3da-83604ed317c1-kube-api-access-n55kf\") on node \"crc\" DevicePath \"\"" Mar 20 13:50:05 crc kubenswrapper[4690]: I0320 13:50:05.867996 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566910-mnbvl" event={"ID":"15efb6c7-12eb-4214-a3da-83604ed317c1","Type":"ContainerDied","Data":"b178564f64e3a165776aa66e5cd5b9b679b243c06df7bd18888874d6fb5be098"} Mar 20 13:50:05 crc kubenswrapper[4690]: I0320 13:50:05.868038 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b178564f64e3a165776aa66e5cd5b9b679b243c06df7bd18888874d6fb5be098" Mar 20 13:50:05 crc kubenswrapper[4690]: I0320 13:50:05.868043 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566910-mnbvl" Mar 20 13:50:06 crc kubenswrapper[4690]: I0320 13:50:06.313101 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566904-dq7dm"] Mar 20 13:50:06 crc kubenswrapper[4690]: I0320 13:50:06.323308 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566904-dq7dm"] Mar 20 13:50:06 crc kubenswrapper[4690]: I0320 13:50:06.424576 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3777e376-e740-4d36-9378-27dec9e98ec8" path="/var/lib/kubelet/pods/3777e376-e740-4d36-9378-27dec9e98ec8/volumes" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.091362 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-7bb4cc7c98-m6wll_1bdd146d-6133-45a4-8257-db7a84e8950b/kube-rbac-proxy/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.099694 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-7bb4cc7c98-m6wll_1bdd146d-6133-45a4-8257-db7a84e8950b/controller/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.261369 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-frr-files/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.420687 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-reloader/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.424046 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-frr-files/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.450191 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-metrics/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.467680 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-reloader/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.627543 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-frr-files/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.652579 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-metrics/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.658254 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-metrics/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.658300 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-reloader/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.832562 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-reloader/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.833798 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-frr-files/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.887639 4690 log.go:25] "Finished 
parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/cp-metrics/0.log" Mar 20 13:50:27 crc kubenswrapper[4690]: I0320 13:50:27.892116 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/controller/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.018891 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/frr-metrics/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.084601 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/kube-rbac-proxy/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.097181 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/kube-rbac-proxy-frr/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.213323 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/reloader/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.286511 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-bcc4b6f68-95bvg_b179134f-d752-4779-bd5e-5cb469d25ac1/frr-k8s-webhook-server/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.579211 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-84576899bd-5q5sj_8ca65291-10fb-4dfa-9e7f-9b505e2fe542/manager/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.627396 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7dcf6c965f-blr7m_c0d6a728-27b8-420e-ab5b-ebee42426fad/webhook-server/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.759398 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7kmfq_a639575b-8878-4027-92b0-6ba4f66270bf/kube-rbac-proxy/0.log" Mar 20 13:50:28 crc kubenswrapper[4690]: I0320 13:50:28.949170 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-dp5pp_80f8737c-71bf-4a64-964c-b902b649115c/frr/0.log" Mar 20 13:50:29 crc kubenswrapper[4690]: I0320 13:50:29.159971 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-7kmfq_a639575b-8878-4027-92b0-6ba4f66270bf/speaker/0.log" Mar 20 13:50:42 crc kubenswrapper[4690]: I0320 13:50:42.622831 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb_8ce85d01-b103-4ada-930c-10ca16d9801d/util/0.log" Mar 20 13:50:42 crc kubenswrapper[4690]: I0320 13:50:42.877653 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb_8ce85d01-b103-4ada-930c-10ca16d9801d/pull/0.log" Mar 20 13:50:42 crc kubenswrapper[4690]: I0320 13:50:42.887525 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb_8ce85d01-b103-4ada-930c-10ca16d9801d/util/0.log" Mar 20 13:50:42 crc kubenswrapper[4690]: I0320 13:50:42.895457 4690 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb_8ce85d01-b103-4ada-930c-10ca16d9801d/pull/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.086249 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb_8ce85d01-b103-4ada-930c-10ca16d9801d/extract/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.093151 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb_8ce85d01-b103-4ada-930c-10ca16d9801d/pull/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.098394 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1d8741a795bd73341bdd61a6e59c08511cf9466dbb5fc4045ac2dde8746nzcb_8ce85d01-b103-4ada-930c-10ca16d9801d/util/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.239265 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm_46209210-259c-4c0a-96e7-596a1f975b2d/util/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.401068 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm_46209210-259c-4c0a-96e7-596a1f975b2d/pull/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.408454 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm_46209210-259c-4c0a-96e7-596a1f975b2d/pull/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.417800 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm_46209210-259c-4c0a-96e7-596a1f975b2d/util/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.564402 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm_46209210-259c-4c0a-96e7-596a1f975b2d/util/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.592013 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7xhhb"] Mar 20 13:50:43 crc kubenswrapper[4690]: E0320 13:50:43.592619 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15efb6c7-12eb-4214-a3da-83604ed317c1" containerName="oc" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.592684 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="15efb6c7-12eb-4214-a3da-83604ed317c1" containerName="oc" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.592953 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="15efb6c7-12eb-4214-a3da-83604ed317c1" containerName="oc" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.594302 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.597424 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm_46209210-259c-4c0a-96e7-596a1f975b2d/pull/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.617350 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xhhb"] Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.649142 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_2d3ddce10053cc6867b5a0ce1614b30225f3a63fab79a72148165675c1v6mnm_46209210-259c-4c0a-96e7-596a1f975b2d/extract/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.749398 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-utilities\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.749739 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rq8g8\" (UniqueName: \"kubernetes.io/projected/a27bcce3-dea4-4c3b-a4ec-787a255354ad-kube-api-access-rq8g8\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.749795 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-catalog-content\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.790047 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-j6rqn_c6451112-5e01-4830-b37e-d898546035d6/extract-utilities/0.log" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.851078 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-utilities\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.851402 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rq8g8\" (UniqueName: \"kubernetes.io/projected/a27bcce3-dea4-4c3b-a4ec-787a255354ad-kube-api-access-rq8g8\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.851526 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-catalog-content\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.851562 4690 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-utilities\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.851758 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-catalog-content\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.872571 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rq8g8\" (UniqueName: \"kubernetes.io/projected/a27bcce3-dea4-4c3b-a4ec-787a255354ad-kube-api-access-rq8g8\") pod \"redhat-marketplace-7xhhb\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:43 crc kubenswrapper[4690]: I0320 13:50:43.922459 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.027380 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-j6rqn_c6451112-5e01-4830-b37e-d898546035d6/extract-utilities/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.046159 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-j6rqn_c6451112-5e01-4830-b37e-d898546035d6/extract-content/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.122099 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-j6rqn_c6451112-5e01-4830-b37e-d898546035d6/extract-content/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.252716 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-j6rqn_c6451112-5e01-4830-b37e-d898546035d6/extract-utilities/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.322038 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-j6rqn_c6451112-5e01-4830-b37e-d898546035d6/extract-content/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.457524 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xhhb"] Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.530733 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fz7fb_afd7fa14-3170-4c3e-9370-80bcbe52e69f/extract-utilities/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.567082 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-j6rqn_c6451112-5e01-4830-b37e-d898546035d6/registry-server/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.679063 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fz7fb_afd7fa14-3170-4c3e-9370-80bcbe52e69f/extract-content/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.702174 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fz7fb_afd7fa14-3170-4c3e-9370-80bcbe52e69f/extract-utilities/0.log" 
Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.702531 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fz7fb_afd7fa14-3170-4c3e-9370-80bcbe52e69f/extract-content/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.910128 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fz7fb_afd7fa14-3170-4c3e-9370-80bcbe52e69f/extract-utilities/0.log" Mar 20 13:50:44 crc kubenswrapper[4690]: I0320 13:50:44.941532 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fz7fb_afd7fa14-3170-4c3e-9370-80bcbe52e69f/extract-content/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.136365 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-klwmc_479d5dc0-0f18-4083-88d8-e07327096950/marketplace-operator/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.203308 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-fz7fb_afd7fa14-3170-4c3e-9370-80bcbe52e69f/registry-server/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.225909 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jghhb_74adca7d-849a-45af-9236-00ff6a15a294/extract-utilities/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.232755 4690 generic.go:334] "Generic (PLEG): container finished" podID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerID="cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6" exitCode=0 Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.232801 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xhhb" event={"ID":"a27bcce3-dea4-4c3b-a4ec-787a255354ad","Type":"ContainerDied","Data":"cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6"} Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.232825 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xhhb" event={"ID":"a27bcce3-dea4-4c3b-a4ec-787a255354ad","Type":"ContainerStarted","Data":"6d8ee13202201b5f5a55d182f703b62f0930429d36aeb13d1532d7c2e962f4b0"} Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.403483 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jghhb_74adca7d-849a-45af-9236-00ff6a15a294/extract-content/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.410955 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jghhb_74adca7d-849a-45af-9236-00ff6a15a294/extract-utilities/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.426115 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jghhb_74adca7d-849a-45af-9236-00ff6a15a294/extract-content/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.599632 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jghhb_74adca7d-849a-45af-9236-00ff6a15a294/extract-content/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.614479 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jghhb_74adca7d-849a-45af-9236-00ff6a15a294/extract-utilities/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 
13:50:45.680738 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-jghhb_74adca7d-849a-45af-9236-00ff6a15a294/registry-server/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.797176 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dkm4r_9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa/extract-utilities/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.949353 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dkm4r_9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa/extract-content/0.log" Mar 20 13:50:45 crc kubenswrapper[4690]: I0320 13:50:45.969620 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dkm4r_9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa/extract-content/0.log" Mar 20 13:50:46 crc kubenswrapper[4690]: I0320 13:50:46.000815 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dkm4r_9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa/extract-utilities/0.log" Mar 20 13:50:46 crc kubenswrapper[4690]: I0320 13:50:46.182647 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dkm4r_9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa/extract-utilities/0.log" Mar 20 13:50:46 crc kubenswrapper[4690]: I0320 13:50:46.211455 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dkm4r_9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa/extract-content/0.log" Mar 20 13:50:46 crc kubenswrapper[4690]: I0320 13:50:46.371080 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-dkm4r_9f3b3f2f-c465-42b4-9e5e-eebdb270bcaa/registry-server/0.log" Mar 20 13:50:47 crc kubenswrapper[4690]: I0320 13:50:47.249729 4690 generic.go:334] "Generic (PLEG): container finished" podID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerID="fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c" exitCode=0 Mar 20 13:50:47 crc kubenswrapper[4690]: I0320 13:50:47.249831 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xhhb" event={"ID":"a27bcce3-dea4-4c3b-a4ec-787a255354ad","Type":"ContainerDied","Data":"fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c"} Mar 20 13:50:48 crc kubenswrapper[4690]: I0320 13:50:48.260962 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xhhb" event={"ID":"a27bcce3-dea4-4c3b-a4ec-787a255354ad","Type":"ContainerStarted","Data":"4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761"} Mar 20 13:50:48 crc kubenswrapper[4690]: I0320 13:50:48.283559 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7xhhb" podStartSLOduration=2.855974569 podStartE2EDuration="5.283540957s" podCreationTimestamp="2026-03-20 13:50:43 +0000 UTC" firstStartedPulling="2026-03-20 13:50:45.234393827 +0000 UTC m=+1691.523993770" lastFinishedPulling="2026-03-20 13:50:47.661960215 +0000 UTC m=+1693.951560158" observedRunningTime="2026-03-20 13:50:48.281717855 +0000 UTC m=+1694.571317808" watchObservedRunningTime="2026-03-20 13:50:48.283540957 +0000 UTC m=+1694.573140900" Mar 20 13:50:53 crc kubenswrapper[4690]: I0320 13:50:53.514596 4690 scope.go:117] "RemoveContainer" containerID="021d3d31373c90d17fcc5b543b2b52f1c4225fc63a4eff6b14cd6ba5fdedd6d6" Mar 20 13:50:53 crc 
kubenswrapper[4690]: I0320 13:50:53.923318 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:53 crc kubenswrapper[4690]: I0320 13:50:53.923721 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:54 crc kubenswrapper[4690]: I0320 13:50:54.014440 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:54 crc kubenswrapper[4690]: I0320 13:50:54.364625 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:54 crc kubenswrapper[4690]: I0320 13:50:54.434583 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xhhb"] Mar 20 13:50:56 crc kubenswrapper[4690]: I0320 13:50:56.342802 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7xhhb" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerName="registry-server" containerID="cri-o://4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761" gracePeriod=2 Mar 20 13:50:56 crc kubenswrapper[4690]: I0320 13:50:56.875002 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.023516 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rq8g8\" (UniqueName: \"kubernetes.io/projected/a27bcce3-dea4-4c3b-a4ec-787a255354ad-kube-api-access-rq8g8\") pod \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.024907 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-utilities\") pod \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.025034 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-catalog-content\") pod \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\" (UID: \"a27bcce3-dea4-4c3b-a4ec-787a255354ad\") " Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.025740 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-utilities" (OuterVolumeSpecName: "utilities") pod "a27bcce3-dea4-4c3b-a4ec-787a255354ad" (UID: "a27bcce3-dea4-4c3b-a4ec-787a255354ad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.031158 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a27bcce3-dea4-4c3b-a4ec-787a255354ad-kube-api-access-rq8g8" (OuterVolumeSpecName: "kube-api-access-rq8g8") pod "a27bcce3-dea4-4c3b-a4ec-787a255354ad" (UID: "a27bcce3-dea4-4c3b-a4ec-787a255354ad"). InnerVolumeSpecName "kube-api-access-rq8g8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.083414 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a27bcce3-dea4-4c3b-a4ec-787a255354ad" (UID: "a27bcce3-dea4-4c3b-a4ec-787a255354ad"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.127409 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rq8g8\" (UniqueName: \"kubernetes.io/projected/a27bcce3-dea4-4c3b-a4ec-787a255354ad-kube-api-access-rq8g8\") on node \"crc\" DevicePath \"\"" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.127445 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.127454 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a27bcce3-dea4-4c3b-a4ec-787a255354ad-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.355131 4690 generic.go:334] "Generic (PLEG): container finished" podID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerID="4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761" exitCode=0 Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.355183 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xhhb" event={"ID":"a27bcce3-dea4-4c3b-a4ec-787a255354ad","Type":"ContainerDied","Data":"4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761"} Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.355231 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7xhhb" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.355255 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7xhhb" event={"ID":"a27bcce3-dea4-4c3b-a4ec-787a255354ad","Type":"ContainerDied","Data":"6d8ee13202201b5f5a55d182f703b62f0930429d36aeb13d1532d7c2e962f4b0"} Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.355296 4690 scope.go:117] "RemoveContainer" containerID="4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.376700 4690 scope.go:117] "RemoveContainer" containerID="fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.398338 4690 scope.go:117] "RemoveContainer" containerID="cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.416634 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xhhb"] Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.429962 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7xhhb"] Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.442006 4690 scope.go:117] "RemoveContainer" containerID="4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761" Mar 20 13:50:57 crc kubenswrapper[4690]: E0320 13:50:57.446038 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761\": container with ID starting with 4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761 not found: ID does not exist" containerID="4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.446099 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761"} err="failed to get container status \"4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761\": rpc error: code = NotFound desc = could not find container \"4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761\": container with ID starting with 4eebe002c84ae3ed012a9ca7a3b7dd32070f086b9652db817e77505137936761 not found: ID does not exist" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.446131 4690 scope.go:117] "RemoveContainer" containerID="fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c" Mar 20 13:50:57 crc kubenswrapper[4690]: E0320 13:50:57.448393 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c\": container with ID starting with fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c not found: ID does not exist" containerID="fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.448430 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c"} err="failed to get container status \"fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c\": rpc error: code = NotFound desc = could not find 
container \"fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c\": container with ID starting with fe2bf6e7c997df36ecc4ebf5b014fbb58ebbceae4c7d3623e8241fec74c8af2c not found: ID does not exist" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.448454 4690 scope.go:117] "RemoveContainer" containerID="cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6" Mar 20 13:50:57 crc kubenswrapper[4690]: E0320 13:50:57.449311 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6\": container with ID starting with cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6 not found: ID does not exist" containerID="cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6" Mar 20 13:50:57 crc kubenswrapper[4690]: I0320 13:50:57.449353 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6"} err="failed to get container status \"cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6\": rpc error: code = NotFound desc = could not find container \"cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6\": container with ID starting with cbdc12e4668bff48a711e2c914e0e8c630dba48cf4fd983624eca3ed95725ea6 not found: ID does not exist" Mar 20 13:50:58 crc kubenswrapper[4690]: I0320 13:50:58.427235 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" path="/var/lib/kubelet/pods/a27bcce3-dea4-4c3b-a4ec-787a255354ad/volumes" Mar 20 13:51:03 crc kubenswrapper[4690]: I0320 13:51:03.829661 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:51:03 crc kubenswrapper[4690]: I0320 13:51:03.830218 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:51:33 crc kubenswrapper[4690]: I0320 13:51:33.829350 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:51:33 crc kubenswrapper[4690]: I0320 13:51:33.829999 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:51:53 crc kubenswrapper[4690]: I0320 13:51:53.611177 4690 scope.go:117] "RemoveContainer" containerID="65d13e6f22c58f517425c342af9294a86067b0d1bb4ecc0e7e8547c83629f775" Mar 20 13:51:53 crc kubenswrapper[4690]: I0320 13:51:53.648213 4690 scope.go:117] "RemoveContainer" containerID="7cffff86788f60940a1792daa55d1752197add35930c7d487367a4d90df4f056" Mar 20 
13:51:53 crc kubenswrapper[4690]: I0320 13:51:53.692092 4690 scope.go:117] "RemoveContainer" containerID="d8fa1083f66c1f32ef327d783429ec82fcddc21e2993b92c074dc03c0b37a8a7" Mar 20 13:51:53 crc kubenswrapper[4690]: I0320 13:51:53.725383 4690 scope.go:117] "RemoveContainer" containerID="cf65bfa25af093e923e8b70315be4798ae990b3772e2aa12319cab9b96a6540e" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.170455 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566912-pn9b9"] Mar 20 13:52:00 crc kubenswrapper[4690]: E0320 13:52:00.171780 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerName="registry-server" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.171801 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerName="registry-server" Mar 20 13:52:00 crc kubenswrapper[4690]: E0320 13:52:00.171828 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerName="extract-utilities" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.171839 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerName="extract-utilities" Mar 20 13:52:00 crc kubenswrapper[4690]: E0320 13:52:00.171948 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerName="extract-content" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.171962 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerName="extract-content" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.172255 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="a27bcce3-dea4-4c3b-a4ec-787a255354ad" containerName="registry-server" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.173268 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566912-pn9b9" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.176303 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.176720 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.176839 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.183031 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566912-pn9b9"] Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.319059 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65bsl\" (UniqueName: \"kubernetes.io/projected/9f7aa92b-b121-46f6-a3b4-46add0d677b8-kube-api-access-65bsl\") pod \"auto-csr-approver-29566912-pn9b9\" (UID: \"9f7aa92b-b121-46f6-a3b4-46add0d677b8\") " pod="openshift-infra/auto-csr-approver-29566912-pn9b9" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.420841 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65bsl\" (UniqueName: \"kubernetes.io/projected/9f7aa92b-b121-46f6-a3b4-46add0d677b8-kube-api-access-65bsl\") pod \"auto-csr-approver-29566912-pn9b9\" (UID: \"9f7aa92b-b121-46f6-a3b4-46add0d677b8\") " pod="openshift-infra/auto-csr-approver-29566912-pn9b9" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.463332 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65bsl\" (UniqueName: \"kubernetes.io/projected/9f7aa92b-b121-46f6-a3b4-46add0d677b8-kube-api-access-65bsl\") pod \"auto-csr-approver-29566912-pn9b9\" (UID: \"9f7aa92b-b121-46f6-a3b4-46add0d677b8\") " pod="openshift-infra/auto-csr-approver-29566912-pn9b9" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.508706 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566912-pn9b9" Mar 20 13:52:00 crc kubenswrapper[4690]: I0320 13:52:00.998380 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566912-pn9b9"] Mar 20 13:52:01 crc kubenswrapper[4690]: I0320 13:52:01.083309 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566912-pn9b9" event={"ID":"9f7aa92b-b121-46f6-a3b4-46add0d677b8","Type":"ContainerStarted","Data":"bf3dadf9ceeafa5b1ff85cde1f478cd3131f6512942ab2048f912fbdf73021f6"} Mar 20 13:52:03 crc kubenswrapper[4690]: I0320 13:52:03.103975 4690 generic.go:334] "Generic (PLEG): container finished" podID="9f7aa92b-b121-46f6-a3b4-46add0d677b8" containerID="c1fabfc928214b7916efa2010700b1251f4630a9e57ad1ea3cb15466942cbd8b" exitCode=0 Mar 20 13:52:03 crc kubenswrapper[4690]: I0320 13:52:03.104064 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566912-pn9b9" event={"ID":"9f7aa92b-b121-46f6-a3b4-46add0d677b8","Type":"ContainerDied","Data":"c1fabfc928214b7916efa2010700b1251f4630a9e57ad1ea3cb15466942cbd8b"} Mar 20 13:52:03 crc kubenswrapper[4690]: I0320 13:52:03.829538 4690 patch_prober.go:28] interesting pod/machine-config-daemon-ftcqx container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Mar 20 13:52:03 crc kubenswrapper[4690]: I0320 13:52:03.829883 4690 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Mar 20 13:52:03 crc kubenswrapper[4690]: I0320 13:52:03.829922 4690 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" Mar 20 13:52:03 crc kubenswrapper[4690]: I0320 13:52:03.830555 4690 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651"} pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Mar 20 13:52:03 crc kubenswrapper[4690]: I0320 13:52:03.830617 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" containerName="machine-config-daemon" containerID="cri-o://fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" gracePeriod=600 Mar 20 13:52:03 crc kubenswrapper[4690]: E0320 13:52:03.952906 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.052279 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/placement-899c-account-create-update-pd277"] Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.066317 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-008d-account-create-update-mw8ch"] Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.084239 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-899c-account-create-update-pd277"] Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.094409 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-008d-account-create-update-mw8ch"] Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.106278 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-27tlv"] Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.114308 4690 generic.go:334] "Generic (PLEG): container finished" podID="60ded650-b298-4115-8286-8969b94d4062" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" exitCode=0 Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.114381 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerDied","Data":"fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651"} Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.114438 4690 scope.go:117] "RemoveContainer" containerID="5e696252e251066c1296443f70dfdb2d4815582a27f8d0fb1a1a2dd90457b26f" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.115198 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:52:04 crc kubenswrapper[4690]: E0320 13:52:04.115450 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.117706 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-27tlv"] Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.139444 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-2dbfj"] Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.153726 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-2dbfj"] Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.426961 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a16960c-b84b-4b25-b51a-9f5dad54e473" path="/var/lib/kubelet/pods/0a16960c-b84b-4b25-b51a-9f5dad54e473/volumes" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.427973 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c82e598b-5f84-4e68-aa8f-5682574fcae9" path="/var/lib/kubelet/pods/c82e598b-5f84-4e68-aa8f-5682574fcae9/volumes" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.428617 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cc9e0393-9cc0-4120-8661-31fc5e0a77f6" path="/var/lib/kubelet/pods/cc9e0393-9cc0-4120-8661-31fc5e0a77f6/volumes" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.429304 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes 
dir" podUID="d1f9d0e8-10b6-4aa6-ae3a-890f9e521253" path="/var/lib/kubelet/pods/d1f9d0e8-10b6-4aa6-ae3a-890f9e521253/volumes" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.452990 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566912-pn9b9" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.621167 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65bsl\" (UniqueName: \"kubernetes.io/projected/9f7aa92b-b121-46f6-a3b4-46add0d677b8-kube-api-access-65bsl\") pod \"9f7aa92b-b121-46f6-a3b4-46add0d677b8\" (UID: \"9f7aa92b-b121-46f6-a3b4-46add0d677b8\") " Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.626354 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f7aa92b-b121-46f6-a3b4-46add0d677b8-kube-api-access-65bsl" (OuterVolumeSpecName: "kube-api-access-65bsl") pod "9f7aa92b-b121-46f6-a3b4-46add0d677b8" (UID: "9f7aa92b-b121-46f6-a3b4-46add0d677b8"). InnerVolumeSpecName "kube-api-access-65bsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:52:04 crc kubenswrapper[4690]: I0320 13:52:04.723448 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65bsl\" (UniqueName: \"kubernetes.io/projected/9f7aa92b-b121-46f6-a3b4-46add0d677b8-kube-api-access-65bsl\") on node \"crc\" DevicePath \"\"" Mar 20 13:52:05 crc kubenswrapper[4690]: I0320 13:52:05.128364 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566912-pn9b9" event={"ID":"9f7aa92b-b121-46f6-a3b4-46add0d677b8","Type":"ContainerDied","Data":"bf3dadf9ceeafa5b1ff85cde1f478cd3131f6512942ab2048f912fbdf73021f6"} Mar 20 13:52:05 crc kubenswrapper[4690]: I0320 13:52:05.128457 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bf3dadf9ceeafa5b1ff85cde1f478cd3131f6512942ab2048f912fbdf73021f6" Mar 20 13:52:05 crc kubenswrapper[4690]: I0320 13:52:05.128535 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566912-pn9b9" Mar 20 13:52:05 crc kubenswrapper[4690]: I0320 13:52:05.518820 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566906-ncdqs"] Mar 20 13:52:05 crc kubenswrapper[4690]: I0320 13:52:05.528141 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566906-ncdqs"] Mar 20 13:52:06 crc kubenswrapper[4690]: I0320 13:52:06.438699 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c007960-0fbe-471e-95a9-fecd3bcfb0f5" path="/var/lib/kubelet/pods/7c007960-0fbe-471e-95a9-fecd3bcfb0f5/volumes" Mar 20 13:52:07 crc kubenswrapper[4690]: I0320 13:52:07.058903 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-thbgv"] Mar 20 13:52:07 crc kubenswrapper[4690]: I0320 13:52:07.075370 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-245d-account-create-update-gsx28"] Mar 20 13:52:07 crc kubenswrapper[4690]: I0320 13:52:07.086109 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-thbgv"] Mar 20 13:52:07 crc kubenswrapper[4690]: I0320 13:52:07.096710 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-245d-account-create-update-gsx28"] Mar 20 13:52:08 crc kubenswrapper[4690]: I0320 13:52:08.432461 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57aa5abf-4617-4b31-8a02-2721982d912c" path="/var/lib/kubelet/pods/57aa5abf-4617-4b31-8a02-2721982d912c/volumes" Mar 20 13:52:08 crc kubenswrapper[4690]: I0320 13:52:08.433664 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f89adcb7-be07-48cd-8e10-0c8509a96029" path="/var/lib/kubelet/pods/f89adcb7-be07-48cd-8e10-0c8509a96029/volumes" Mar 20 13:52:18 crc kubenswrapper[4690]: I0320 13:52:18.421279 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:52:18 crc kubenswrapper[4690]: E0320 13:52:18.422105 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:52:21 crc kubenswrapper[4690]: I0320 13:52:21.330396 4690 generic.go:334] "Generic (PLEG): container finished" podID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerID="6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f" exitCode=0 Mar 20 13:52:21 crc kubenswrapper[4690]: I0320 13:52:21.330487 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-82724/must-gather-zvwq2" event={"ID":"bb362278-db9c-48b1-94c8-6da00bd25d9e","Type":"ContainerDied","Data":"6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f"} Mar 20 13:52:21 crc kubenswrapper[4690]: I0320 13:52:21.331253 4690 scope.go:117] "RemoveContainer" containerID="6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f" Mar 20 13:52:21 crc kubenswrapper[4690]: E0320 13:52:21.429929 4690 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb362278_db9c_48b1_94c8_6da00bd25d9e.slice/crio-conmon-6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f.scope\": RecentStats: unable to find data in memory cache]" Mar 20 13:52:22 crc kubenswrapper[4690]: I0320 13:52:22.335993 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-82724_must-gather-zvwq2_bb362278-db9c-48b1-94c8-6da00bd25d9e/gather/0.log" Mar 20 13:52:30 crc kubenswrapper[4690]: I0320 13:52:30.632611 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-82724/must-gather-zvwq2"] Mar 20 13:52:30 crc kubenswrapper[4690]: I0320 13:52:30.633472 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-82724/must-gather-zvwq2" podUID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerName="copy" containerID="cri-o://da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b" gracePeriod=2 Mar 20 13:52:30 crc kubenswrapper[4690]: I0320 13:52:30.640016 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-82724/must-gather-zvwq2"] Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.066088 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-82724_must-gather-zvwq2_bb362278-db9c-48b1-94c8-6da00bd25d9e/copy/0.log" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.066757 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.207610 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ps6fw\" (UniqueName: \"kubernetes.io/projected/bb362278-db9c-48b1-94c8-6da00bd25d9e-kube-api-access-ps6fw\") pod \"bb362278-db9c-48b1-94c8-6da00bd25d9e\" (UID: \"bb362278-db9c-48b1-94c8-6da00bd25d9e\") " Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.207884 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bb362278-db9c-48b1-94c8-6da00bd25d9e-must-gather-output\") pod \"bb362278-db9c-48b1-94c8-6da00bd25d9e\" (UID: \"bb362278-db9c-48b1-94c8-6da00bd25d9e\") " Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.213470 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb362278-db9c-48b1-94c8-6da00bd25d9e-kube-api-access-ps6fw" (OuterVolumeSpecName: "kube-api-access-ps6fw") pod "bb362278-db9c-48b1-94c8-6da00bd25d9e" (UID: "bb362278-db9c-48b1-94c8-6da00bd25d9e"). InnerVolumeSpecName "kube-api-access-ps6fw". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.309679 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ps6fw\" (UniqueName: \"kubernetes.io/projected/bb362278-db9c-48b1-94c8-6da00bd25d9e-kube-api-access-ps6fw\") on node \"crc\" DevicePath \"\"" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.343622 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bb362278-db9c-48b1-94c8-6da00bd25d9e-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "bb362278-db9c-48b1-94c8-6da00bd25d9e" (UID: "bb362278-db9c-48b1-94c8-6da00bd25d9e"). InnerVolumeSpecName "must-gather-output". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.411178 4690 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/bb362278-db9c-48b1-94c8-6da00bd25d9e-must-gather-output\") on node \"crc\" DevicePath \"\"" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.436015 4690 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-82724_must-gather-zvwq2_bb362278-db9c-48b1-94c8-6da00bd25d9e/copy/0.log" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.436589 4690 generic.go:334] "Generic (PLEG): container finished" podID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerID="da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b" exitCode=143 Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.436651 4690 scope.go:117] "RemoveContainer" containerID="da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.436704 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-82724/must-gather-zvwq2" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.457634 4690 scope.go:117] "RemoveContainer" containerID="6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.521689 4690 scope.go:117] "RemoveContainer" containerID="da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b" Mar 20 13:52:31 crc kubenswrapper[4690]: E0320 13:52:31.522136 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b\": container with ID starting with da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b not found: ID does not exist" containerID="da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.522185 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b"} err="failed to get container status \"da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b\": rpc error: code = NotFound desc = could not find container \"da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b\": container with ID starting with da308a057792f8d6e6107e518eed245401320660bd569db4ec575777a01f684b not found: ID does not exist" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.522217 4690 scope.go:117] "RemoveContainer" containerID="6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f" Mar 20 13:52:31 crc kubenswrapper[4690]: E0320 13:52:31.522634 4690 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f\": container with ID starting with 6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f not found: ID does not exist" containerID="6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f" Mar 20 13:52:31 crc kubenswrapper[4690]: I0320 13:52:31.522672 4690 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f"} err="failed to get container status 
\"6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f\": rpc error: code = NotFound desc = could not find container \"6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f\": container with ID starting with 6dacd4f90816fbe28ab6f7ff2ae4a269fdb19ae3b19c9a65ac3bb22f3ab1949f not found: ID does not exist" Mar 20 13:52:32 crc kubenswrapper[4690]: I0320 13:52:32.039619 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-rf8w7"] Mar 20 13:52:32 crc kubenswrapper[4690]: I0320 13:52:32.055057 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-rf8w7"] Mar 20 13:52:32 crc kubenswrapper[4690]: I0320 13:52:32.429588 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb362278-db9c-48b1-94c8-6da00bd25d9e" path="/var/lib/kubelet/pods/bb362278-db9c-48b1-94c8-6da00bd25d9e/volumes" Mar 20 13:52:32 crc kubenswrapper[4690]: I0320 13:52:32.431214 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e" path="/var/lib/kubelet/pods/e5d5fc4d-8398-4ccd-bf8e-69bbd639e23e/volumes" Mar 20 13:52:33 crc kubenswrapper[4690]: I0320 13:52:33.415243 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:52:33 crc kubenswrapper[4690]: E0320 13:52:33.415993 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:52:36 crc kubenswrapper[4690]: I0320 13:52:36.042878 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/root-account-create-update-nxkjc"] Mar 20 13:52:36 crc kubenswrapper[4690]: I0320 13:52:36.058740 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/root-account-create-update-nxkjc"] Mar 20 13:52:36 crc kubenswrapper[4690]: I0320 13:52:36.427283 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ca2df3b-74d4-4d7f-907f-7893a816cc3a" path="/var/lib/kubelet/pods/4ca2df3b-74d4-4d7f-907f-7893a816cc3a/volumes" Mar 20 13:52:46 crc kubenswrapper[4690]: I0320 13:52:46.415317 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:52:46 crc kubenswrapper[4690]: E0320 13:52:46.416050 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:52:49 crc kubenswrapper[4690]: I0320 13:52:49.050776 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-9a17-account-create-update-jcbrv"] Mar 20 13:52:49 crc kubenswrapper[4690]: I0320 13:52:49.064337 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-fhxfq"] Mar 20 13:52:49 crc kubenswrapper[4690]: I0320 13:52:49.075151 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/barbican-db-create-fhxfq"] Mar 20 13:52:49 crc kubenswrapper[4690]: I0320 13:52:49.091619 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-9a17-account-create-update-jcbrv"] Mar 20 13:52:49 crc kubenswrapper[4690]: I0320 13:52:49.099158 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-hw7cv"] Mar 20 13:52:49 crc kubenswrapper[4690]: I0320 13:52:49.105797 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-78ee-account-create-update-88b9m"] Mar 20 13:52:49 crc kubenswrapper[4690]: I0320 13:52:49.112652 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-hw7cv"] Mar 20 13:52:49 crc kubenswrapper[4690]: I0320 13:52:49.119645 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-78ee-account-create-update-88b9m"] Mar 20 13:52:50 crc kubenswrapper[4690]: I0320 13:52:50.427490 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa" path="/var/lib/kubelet/pods/0054dac3-92d7-4c60-8d3e-a4b0f4b48dfa/volumes" Mar 20 13:52:50 crc kubenswrapper[4690]: I0320 13:52:50.429230 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02be9901-f882-45b3-8d1e-9105f2551417" path="/var/lib/kubelet/pods/02be9901-f882-45b3-8d1e-9105f2551417/volumes" Mar 20 13:52:50 crc kubenswrapper[4690]: I0320 13:52:50.432546 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="519b3e37-0f94-4018-97e2-7c7b0b99df0d" path="/var/lib/kubelet/pods/519b3e37-0f94-4018-97e2-7c7b0b99df0d/volumes" Mar 20 13:52:50 crc kubenswrapper[4690]: I0320 13:52:50.434112 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="94049a0c-7da4-43be-8e15-36e9a282f728" path="/var/lib/kubelet/pods/94049a0c-7da4-43be-8e15-36e9a282f728/volumes" Mar 20 13:52:52 crc kubenswrapper[4690]: I0320 13:52:52.039132 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-be37-account-create-update-2w6zx"] Mar 20 13:52:52 crc kubenswrapper[4690]: I0320 13:52:52.055038 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-gblwq"] Mar 20 13:52:52 crc kubenswrapper[4690]: I0320 13:52:52.066080 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-be37-account-create-update-2w6zx"] Mar 20 13:52:52 crc kubenswrapper[4690]: I0320 13:52:52.075925 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-gblwq"] Mar 20 13:52:52 crc kubenswrapper[4690]: I0320 13:52:52.425167 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9" path="/var/lib/kubelet/pods/49af4b20-8cbb-4d8e-af0d-d9fa38cb32e9/volumes" Mar 20 13:52:52 crc kubenswrapper[4690]: I0320 13:52:52.426047 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4fcecc7-f191-472f-abcc-d886648e5ecc" path="/var/lib/kubelet/pods/b4fcecc7-f191-472f-abcc-d886648e5ecc/volumes" Mar 20 13:52:53 crc kubenswrapper[4690]: I0320 13:52:53.842195 4690 scope.go:117] "RemoveContainer" containerID="4c1262cac50f850443ac0784133a3bec13148564576f72fd8bd86dd7939ee867" Mar 20 13:52:53 crc kubenswrapper[4690]: I0320 13:52:53.868734 4690 scope.go:117] "RemoveContainer" containerID="eb111293e600d9a3624e22f5aa97930efc27c52255ef7dcb28e16000e9de567a" Mar 20 13:52:53 crc kubenswrapper[4690]: I0320 13:52:53.951863 4690 scope.go:117] "RemoveContainer" 
containerID="29992538275ba381fbefd3befbca7b0eba29ec6b5dd30f5755ceb43330847122" Mar 20 13:52:53 crc kubenswrapper[4690]: I0320 13:52:53.984567 4690 scope.go:117] "RemoveContainer" containerID="787fc292eebb92aeea979bdffb348af9a0800bbeffe4f971a739a369a6d012f4" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.019416 4690 scope.go:117] "RemoveContainer" containerID="bef8e245fb0edee4e91198217bb3b3de03a43166943d42f2a03766f3ea36ca06" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.065469 4690 scope.go:117] "RemoveContainer" containerID="5e06537acde668849b70cae7ff14df88ac5b57ab12328030c1622dacee40a743" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.127407 4690 scope.go:117] "RemoveContainer" containerID="cdc32408e28f9ccb44f848d88c919534fcaa88503c1d8e4ed1faeffa24eef7b6" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.172120 4690 scope.go:117] "RemoveContainer" containerID="39394e082621b97880542a3f7bdd98c3e70f119252c72189dab79ebec56063d1" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.198547 4690 scope.go:117] "RemoveContainer" containerID="329d01534bfd7a81220169a396b9bed1e88369dc534b29f8fa99af55ce622761" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.223839 4690 scope.go:117] "RemoveContainer" containerID="a6c88d86c601e34db86bc4703a7d5c89bae18abe3c3068f5ed652fe9d4520297" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.252614 4690 scope.go:117] "RemoveContainer" containerID="8b11fa790332f407aebacf84b9bf8b75c7146e07a70f286809d47f6bf74acded" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.283607 4690 scope.go:117] "RemoveContainer" containerID="39687065115c8ffb25b940c788e71bc5eb58fcb0c897217f100dbdf8d5b3e004" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.329112 4690 scope.go:117] "RemoveContainer" containerID="cf9d7b43b434b8c79a344c14e0d52da636df4451b12a8483fc4cbf45930803fb" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.357778 4690 scope.go:117] "RemoveContainer" containerID="e9f6051f9cfa372895e19ab05fd56c78be80d581d9ade62c1924639c83211f6c" Mar 20 13:52:54 crc kubenswrapper[4690]: I0320 13:52:54.383962 4690 scope.go:117] "RemoveContainer" containerID="e1544a1675cfd2b2cfb02566259ee5a3b716d38ea4bc17cf23bfb782bdc54bdc" Mar 20 13:52:57 crc kubenswrapper[4690]: I0320 13:52:57.056932 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-r8lpw"] Mar 20 13:52:57 crc kubenswrapper[4690]: I0320 13:52:57.065021 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-r8lpw"] Mar 20 13:52:58 crc kubenswrapper[4690]: I0320 13:52:58.436070 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75808517-3db4-41a1-ac99-99324152c26d" path="/var/lib/kubelet/pods/75808517-3db4-41a1-ac99-99324152c26d/volumes" Mar 20 13:53:00 crc kubenswrapper[4690]: I0320 13:53:00.414008 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:53:00 crc kubenswrapper[4690]: E0320 13:53:00.414519 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:53:12 crc kubenswrapper[4690]: I0320 13:53:12.415578 4690 scope.go:117] 
"RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:53:12 crc kubenswrapper[4690]: E0320 13:53:12.416690 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.658193 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q4dbs"] Mar 20 13:53:16 crc kubenswrapper[4690]: E0320 13:53:16.659306 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerName="copy" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.659323 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerName="copy" Mar 20 13:53:16 crc kubenswrapper[4690]: E0320 13:53:16.659338 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerName="gather" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.659346 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerName="gather" Mar 20 13:53:16 crc kubenswrapper[4690]: E0320 13:53:16.659356 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f7aa92b-b121-46f6-a3b4-46add0d677b8" containerName="oc" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.659364 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f7aa92b-b121-46f6-a3b4-46add0d677b8" containerName="oc" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.659585 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f7aa92b-b121-46f6-a3b4-46add0d677b8" containerName="oc" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.659604 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerName="copy" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.659627 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb362278-db9c-48b1-94c8-6da00bd25d9e" containerName="gather" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.661362 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.682676 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q4dbs"] Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.728234 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-catalog-content\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.728350 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmzpf\" (UniqueName: \"kubernetes.io/projected/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-kube-api-access-wmzpf\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.728539 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-utilities\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.831394 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-utilities\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.831580 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-catalog-content\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.831629 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmzpf\" (UniqueName: \"kubernetes.io/projected/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-kube-api-access-wmzpf\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.832693 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-utilities\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.833048 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-catalog-content\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:16 crc kubenswrapper[4690]: I0320 13:53:16.858594 4690 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-wmzpf\" (UniqueName: \"kubernetes.io/projected/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-kube-api-access-wmzpf\") pod \"certified-operators-q4dbs\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:17 crc kubenswrapper[4690]: I0320 13:53:17.010196 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:17 crc kubenswrapper[4690]: I0320 13:53:17.580763 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q4dbs"] Mar 20 13:53:17 crc kubenswrapper[4690]: W0320 13:53:17.583959 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0d0478c8_4f0d_4823_a94f_92ca1fda92a7.slice/crio-956fe6038d403041ccc88619e92bce9021a1ba108eaa9517a7c189eb59d2576a WatchSource:0}: Error finding container 956fe6038d403041ccc88619e92bce9021a1ba108eaa9517a7c189eb59d2576a: Status 404 returned error can't find the container with id 956fe6038d403041ccc88619e92bce9021a1ba108eaa9517a7c189eb59d2576a Mar 20 13:53:17 crc kubenswrapper[4690]: I0320 13:53:17.943780 4690 generic.go:334] "Generic (PLEG): container finished" podID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerID="63b491d748e97f1ba3c248c1190a16fa7b2820b8090537799b15e5a8524a8071" exitCode=0 Mar 20 13:53:17 crc kubenswrapper[4690]: I0320 13:53:17.944113 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4dbs" event={"ID":"0d0478c8-4f0d-4823-a94f-92ca1fda92a7","Type":"ContainerDied","Data":"63b491d748e97f1ba3c248c1190a16fa7b2820b8090537799b15e5a8524a8071"} Mar 20 13:53:17 crc kubenswrapper[4690]: I0320 13:53:17.944159 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4dbs" event={"ID":"0d0478c8-4f0d-4823-a94f-92ca1fda92a7","Type":"ContainerStarted","Data":"956fe6038d403041ccc88619e92bce9021a1ba108eaa9517a7c189eb59d2576a"} Mar 20 13:53:17 crc kubenswrapper[4690]: I0320 13:53:17.946575 4690 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Mar 20 13:53:18 crc kubenswrapper[4690]: I0320 13:53:18.958392 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4dbs" event={"ID":"0d0478c8-4f0d-4823-a94f-92ca1fda92a7","Type":"ContainerStarted","Data":"c7f06f10980dff954e0347f26791016260892da7e732cec886149c73f20adf35"} Mar 20 13:53:19 crc kubenswrapper[4690]: I0320 13:53:19.969528 4690 generic.go:334] "Generic (PLEG): container finished" podID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerID="c7f06f10980dff954e0347f26791016260892da7e732cec886149c73f20adf35" exitCode=0 Mar 20 13:53:19 crc kubenswrapper[4690]: I0320 13:53:19.969732 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4dbs" event={"ID":"0d0478c8-4f0d-4823-a94f-92ca1fda92a7","Type":"ContainerDied","Data":"c7f06f10980dff954e0347f26791016260892da7e732cec886149c73f20adf35"} Mar 20 13:53:22 crc kubenswrapper[4690]: I0320 13:53:22.009963 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4dbs" event={"ID":"0d0478c8-4f0d-4823-a94f-92ca1fda92a7","Type":"ContainerStarted","Data":"b2d8e777876a197b96f0ce528ed8ac1ffaf5368eac84af45a73ccf050254910f"} Mar 20 13:53:22 crc kubenswrapper[4690]: I0320 
13:53:22.039636 4690 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q4dbs" podStartSLOduration=2.488998493 podStartE2EDuration="6.039608103s" podCreationTimestamp="2026-03-20 13:53:16 +0000 UTC" firstStartedPulling="2026-03-20 13:53:17.94633334 +0000 UTC m=+1844.235933293" lastFinishedPulling="2026-03-20 13:53:21.49694294 +0000 UTC m=+1847.786542903" observedRunningTime="2026-03-20 13:53:22.038265425 +0000 UTC m=+1848.327865378" watchObservedRunningTime="2026-03-20 13:53:22.039608103 +0000 UTC m=+1848.329208076" Mar 20 13:53:24 crc kubenswrapper[4690]: I0320 13:53:24.427088 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:53:24 crc kubenswrapper[4690]: E0320 13:53:24.427817 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:53:25 crc kubenswrapper[4690]: I0320 13:53:25.074430 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-8h88w"] Mar 20 13:53:25 crc kubenswrapper[4690]: I0320 13:53:25.093584 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-8h88w"] Mar 20 13:53:26 crc kubenswrapper[4690]: I0320 13:53:26.432039 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="badac960-83c0-4715-b125-0fdd44ae7315" path="/var/lib/kubelet/pods/badac960-83c0-4715-b125-0fdd44ae7315/volumes" Mar 20 13:53:27 crc kubenswrapper[4690]: I0320 13:53:27.011389 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:27 crc kubenswrapper[4690]: I0320 13:53:27.011479 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:27 crc kubenswrapper[4690]: I0320 13:53:27.056328 4690 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:27 crc kubenswrapper[4690]: I0320 13:53:27.115290 4690 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:27 crc kubenswrapper[4690]: I0320 13:53:27.287266 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q4dbs"] Mar 20 13:53:29 crc kubenswrapper[4690]: I0320 13:53:29.103769 4690 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q4dbs" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerName="registry-server" containerID="cri-o://b2d8e777876a197b96f0ce528ed8ac1ffaf5368eac84af45a73ccf050254910f" gracePeriod=2 Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.139510 4690 generic.go:334] "Generic (PLEG): container finished" podID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerID="b2d8e777876a197b96f0ce528ed8ac1ffaf5368eac84af45a73ccf050254910f" exitCode=0 Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.139598 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4dbs" 
event={"ID":"0d0478c8-4f0d-4823-a94f-92ca1fda92a7","Type":"ContainerDied","Data":"b2d8e777876a197b96f0ce528ed8ac1ffaf5368eac84af45a73ccf050254910f"} Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.284478 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.445588 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-utilities\") pod \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.445712 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-catalog-content\") pod \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.445768 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmzpf\" (UniqueName: \"kubernetes.io/projected/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-kube-api-access-wmzpf\") pod \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\" (UID: \"0d0478c8-4f0d-4823-a94f-92ca1fda92a7\") " Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.446546 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-utilities" (OuterVolumeSpecName: "utilities") pod "0d0478c8-4f0d-4823-a94f-92ca1fda92a7" (UID: "0d0478c8-4f0d-4823-a94f-92ca1fda92a7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.453817 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-kube-api-access-wmzpf" (OuterVolumeSpecName: "kube-api-access-wmzpf") pod "0d0478c8-4f0d-4823-a94f-92ca1fda92a7" (UID: "0d0478c8-4f0d-4823-a94f-92ca1fda92a7"). InnerVolumeSpecName "kube-api-access-wmzpf". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.504267 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0d0478c8-4f0d-4823-a94f-92ca1fda92a7" (UID: "0d0478c8-4f0d-4823-a94f-92ca1fda92a7"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.548385 4690 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-utilities\") on node \"crc\" DevicePath \"\"" Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.548427 4690 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-catalog-content\") on node \"crc\" DevicePath \"\"" Mar 20 13:53:30 crc kubenswrapper[4690]: I0320 13:53:30.548444 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmzpf\" (UniqueName: \"kubernetes.io/projected/0d0478c8-4f0d-4823-a94f-92ca1fda92a7-kube-api-access-wmzpf\") on node \"crc\" DevicePath \"\"" Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.031146 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-bsz48"] Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.040692 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-bsz48"] Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.153345 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q4dbs" event={"ID":"0d0478c8-4f0d-4823-a94f-92ca1fda92a7","Type":"ContainerDied","Data":"956fe6038d403041ccc88619e92bce9021a1ba108eaa9517a7c189eb59d2576a"} Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.153380 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q4dbs" Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.153408 4690 scope.go:117] "RemoveContainer" containerID="b2d8e777876a197b96f0ce528ed8ac1ffaf5368eac84af45a73ccf050254910f" Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.185130 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q4dbs"] Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.190462 4690 scope.go:117] "RemoveContainer" containerID="c7f06f10980dff954e0347f26791016260892da7e732cec886149c73f20adf35" Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.193176 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q4dbs"] Mar 20 13:53:31 crc kubenswrapper[4690]: I0320 13:53:31.210216 4690 scope.go:117] "RemoveContainer" containerID="63b491d748e97f1ba3c248c1190a16fa7b2820b8090537799b15e5a8524a8071" Mar 20 13:53:32 crc kubenswrapper[4690]: I0320 13:53:32.425957 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" path="/var/lib/kubelet/pods/0d0478c8-4f0d-4823-a94f-92ca1fda92a7/volumes" Mar 20 13:53:32 crc kubenswrapper[4690]: I0320 13:53:32.427154 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d6a06dc2-5128-47d4-a10a-e2ba196ec0c9" path="/var/lib/kubelet/pods/d6a06dc2-5128-47d4-a10a-e2ba196ec0c9/volumes" Mar 20 13:53:36 crc kubenswrapper[4690]: I0320 13:53:36.434403 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:53:36 crc kubenswrapper[4690]: E0320 13:53:36.435079 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:53:37 crc kubenswrapper[4690]: I0320 13:53:37.034201 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-vgnp6"] Mar 20 13:53:37 crc kubenswrapper[4690]: I0320 13:53:37.042561 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-42k9z"] Mar 20 13:53:37 crc kubenswrapper[4690]: I0320 13:53:37.049219 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-vgnp6"] Mar 20 13:53:37 crc kubenswrapper[4690]: I0320 13:53:37.057535 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-42k9z"] Mar 20 13:53:38 crc kubenswrapper[4690]: I0320 13:53:38.449339 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6629e615-4e98-4e99-b7dc-6990b379d93c" path="/var/lib/kubelet/pods/6629e615-4e98-4e99-b7dc-6990b379d93c/volumes" Mar 20 13:53:38 crc kubenswrapper[4690]: I0320 13:53:38.451108 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eea9b8e9-b9d0-49ca-ad22-aaf7450c1007" path="/var/lib/kubelet/pods/eea9b8e9-b9d0-49ca-ad22-aaf7450c1007/volumes" Mar 20 13:53:48 crc kubenswrapper[4690]: I0320 13:53:48.415632 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:53:48 crc kubenswrapper[4690]: E0320 13:53:48.416913 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:53:50 crc kubenswrapper[4690]: I0320 13:53:50.042490 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-6rgrr"] Mar 20 13:53:50 crc kubenswrapper[4690]: I0320 13:53:50.051387 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-6rgrr"] Mar 20 13:53:50 crc kubenswrapper[4690]: I0320 13:53:50.422973 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3def27d2-bdda-4c07-b4b2-f695994bd509" path="/var/lib/kubelet/pods/3def27d2-bdda-4c07-b4b2-f695994bd509/volumes" Mar 20 13:53:54 crc kubenswrapper[4690]: I0320 13:53:54.749502 4690 scope.go:117] "RemoveContainer" containerID="a2ec647149e8258ed55ef0a1b018da90f2cf1939843a8b2d91b52e6e2d164078" Mar 20 13:53:54 crc kubenswrapper[4690]: I0320 13:53:54.823360 4690 scope.go:117] "RemoveContainer" containerID="f73270b307867eed494571e6f392667d0c3f6765b8a9e8edd0c26f70820ca006" Mar 20 13:53:54 crc kubenswrapper[4690]: I0320 13:53:54.876623 4690 scope.go:117] "RemoveContainer" containerID="d2a041c9a49926b0aa0ee09c2bb260b6f40b3a0c827cc8216d752c83cf4b41d6" Mar 20 13:53:54 crc kubenswrapper[4690]: I0320 13:53:54.904730 4690 scope.go:117] "RemoveContainer" containerID="6eb961c8d2c437e3c78c36daeeee88a831078f66dc0a9d85833a5bc4727e0274" Mar 20 13:53:54 crc kubenswrapper[4690]: I0320 13:53:54.990818 4690 scope.go:117] "RemoveContainer" containerID="418043c73aa80775deb228b17b1b94ea5f7409fbf50499f0fd6c27dc8ddeef46" Mar 20 13:53:55 crc kubenswrapper[4690]: I0320 13:53:55.015964 4690 
scope.go:117] "RemoveContainer" containerID="6111af7d9d2d98e5bb885344274a5405f8b1848e3df4d659b5d17f660ced3ac9" Mar 20 13:53:55 crc kubenswrapper[4690]: I0320 13:53:55.068545 4690 scope.go:117] "RemoveContainer" containerID="7284d0860af431e8fdf37a986f140380d9d4dfdc40c07b70a50f32946980e80a" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.163506 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566914-fbd4v"] Mar 20 13:54:00 crc kubenswrapper[4690]: E0320 13:54:00.165155 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerName="registry-server" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.165189 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerName="registry-server" Mar 20 13:54:00 crc kubenswrapper[4690]: E0320 13:54:00.165261 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerName="extract-utilities" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.165279 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerName="extract-utilities" Mar 20 13:54:00 crc kubenswrapper[4690]: E0320 13:54:00.165338 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerName="extract-content" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.165356 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerName="extract-content" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.165724 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d0478c8-4f0d-4823-a94f-92ca1fda92a7" containerName="registry-server" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.167029 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566914-fbd4v" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.170145 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.171082 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.171095 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.172445 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566914-fbd4v"] Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.260083 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8xn28\" (UniqueName: \"kubernetes.io/projected/87deed05-0ddc-4abb-a022-361316422072-kube-api-access-8xn28\") pod \"auto-csr-approver-29566914-fbd4v\" (UID: \"87deed05-0ddc-4abb-a022-361316422072\") " pod="openshift-infra/auto-csr-approver-29566914-fbd4v" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.361929 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8xn28\" (UniqueName: \"kubernetes.io/projected/87deed05-0ddc-4abb-a022-361316422072-kube-api-access-8xn28\") pod \"auto-csr-approver-29566914-fbd4v\" (UID: \"87deed05-0ddc-4abb-a022-361316422072\") " pod="openshift-infra/auto-csr-approver-29566914-fbd4v" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.384335 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8xn28\" (UniqueName: \"kubernetes.io/projected/87deed05-0ddc-4abb-a022-361316422072-kube-api-access-8xn28\") pod \"auto-csr-approver-29566914-fbd4v\" (UID: \"87deed05-0ddc-4abb-a022-361316422072\") " pod="openshift-infra/auto-csr-approver-29566914-fbd4v" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.504561 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566914-fbd4v" Mar 20 13:54:00 crc kubenswrapper[4690]: I0320 13:54:00.988864 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566914-fbd4v"] Mar 20 13:54:00 crc kubenswrapper[4690]: W0320 13:54:00.997002 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87deed05_0ddc_4abb_a022_361316422072.slice/crio-6cb95e3d8aad6bed18f99d884846ab20e92bec4796e9c14d3b37f82be9b6a89f WatchSource:0}: Error finding container 6cb95e3d8aad6bed18f99d884846ab20e92bec4796e9c14d3b37f82be9b6a89f: Status 404 returned error can't find the container with id 6cb95e3d8aad6bed18f99d884846ab20e92bec4796e9c14d3b37f82be9b6a89f Mar 20 13:54:01 crc kubenswrapper[4690]: I0320 13:54:01.463528 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566914-fbd4v" event={"ID":"87deed05-0ddc-4abb-a022-361316422072","Type":"ContainerStarted","Data":"6cb95e3d8aad6bed18f99d884846ab20e92bec4796e9c14d3b37f82be9b6a89f"} Mar 20 13:54:02 crc kubenswrapper[4690]: I0320 13:54:02.415525 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:54:02 crc kubenswrapper[4690]: E0320 13:54:02.416062 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:54:02 crc kubenswrapper[4690]: I0320 13:54:02.472459 4690 generic.go:334] "Generic (PLEG): container finished" podID="87deed05-0ddc-4abb-a022-361316422072" containerID="fe0453a548035028740820c816529c088af80e7de7bf442d37c1e7b647388bab" exitCode=0 Mar 20 13:54:02 crc kubenswrapper[4690]: I0320 13:54:02.472519 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566914-fbd4v" event={"ID":"87deed05-0ddc-4abb-a022-361316422072","Type":"ContainerDied","Data":"fe0453a548035028740820c816529c088af80e7de7bf442d37c1e7b647388bab"} Mar 20 13:54:03 crc kubenswrapper[4690]: I0320 13:54:03.850212 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566914-fbd4v" Mar 20 13:54:04 crc kubenswrapper[4690]: I0320 13:54:04.036389 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8xn28\" (UniqueName: \"kubernetes.io/projected/87deed05-0ddc-4abb-a022-361316422072-kube-api-access-8xn28\") pod \"87deed05-0ddc-4abb-a022-361316422072\" (UID: \"87deed05-0ddc-4abb-a022-361316422072\") " Mar 20 13:54:04 crc kubenswrapper[4690]: I0320 13:54:04.042324 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87deed05-0ddc-4abb-a022-361316422072-kube-api-access-8xn28" (OuterVolumeSpecName: "kube-api-access-8xn28") pod "87deed05-0ddc-4abb-a022-361316422072" (UID: "87deed05-0ddc-4abb-a022-361316422072"). InnerVolumeSpecName "kube-api-access-8xn28". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:54:04 crc kubenswrapper[4690]: I0320 13:54:04.138814 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8xn28\" (UniqueName: \"kubernetes.io/projected/87deed05-0ddc-4abb-a022-361316422072-kube-api-access-8xn28\") on node \"crc\" DevicePath \"\"" Mar 20 13:54:04 crc kubenswrapper[4690]: I0320 13:54:04.498721 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566914-fbd4v" event={"ID":"87deed05-0ddc-4abb-a022-361316422072","Type":"ContainerDied","Data":"6cb95e3d8aad6bed18f99d884846ab20e92bec4796e9c14d3b37f82be9b6a89f"} Mar 20 13:54:04 crc kubenswrapper[4690]: I0320 13:54:04.499129 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6cb95e3d8aad6bed18f99d884846ab20e92bec4796e9c14d3b37f82be9b6a89f" Mar 20 13:54:04 crc kubenswrapper[4690]: I0320 13:54:04.498818 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566914-fbd4v" Mar 20 13:54:04 crc kubenswrapper[4690]: I0320 13:54:04.937501 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566908-5qpcd"] Mar 20 13:54:04 crc kubenswrapper[4690]: I0320 13:54:04.946645 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566908-5qpcd"] Mar 20 13:54:06 crc kubenswrapper[4690]: I0320 13:54:06.431556 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cda1a881-a0aa-4c7a-bfff-a3e22aeb8919" path="/var/lib/kubelet/pods/cda1a881-a0aa-4c7a-bfff-a3e22aeb8919/volumes" Mar 20 13:54:14 crc kubenswrapper[4690]: I0320 13:54:14.427767 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:54:14 crc kubenswrapper[4690]: E0320 13:54:14.429269 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:54:28 crc kubenswrapper[4690]: I0320 13:54:28.415179 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:54:28 crc kubenswrapper[4690]: E0320 13:54:28.416616 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:54:33 crc kubenswrapper[4690]: I0320 13:54:33.073617 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-x7tz6"] Mar 20 13:54:33 crc kubenswrapper[4690]: I0320 13:54:33.088987 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-x7tz6"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.044829 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-p7w2k"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 
13:54:34.057489 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-9b74-account-create-update-qrqkx"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.070099 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-619b-account-create-update-vld5x"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.077163 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-9b74-account-create-update-qrqkx"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.083728 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-p7w2k"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.090120 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-tqb96"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.096391 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-1ed3-account-create-update-xqkdf"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.102600 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-619b-account-create-update-vld5x"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.108973 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-tqb96"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.115701 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-1ed3-account-create-update-xqkdf"] Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.427616 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1ffaa372-aeed-471d-b5ba-f7692e1daad8" path="/var/lib/kubelet/pods/1ffaa372-aeed-471d-b5ba-f7692e1daad8/volumes" Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.428491 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7338a418-a221-409b-bafd-666e7cc66a8e" path="/var/lib/kubelet/pods/7338a418-a221-409b-bafd-666e7cc66a8e/volumes" Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.429353 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="817d49d2-79e2-42f0-b503-bd6bf78f1459" path="/var/lib/kubelet/pods/817d49d2-79e2-42f0-b503-bd6bf78f1459/volumes" Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.430212 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b" path="/var/lib/kubelet/pods/a9d9f4cd-474e-4175-8b33-3e5ebdcd0c3b/volumes" Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.432108 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b58df85c-1bf7-41ba-9839-d74172783a24" path="/var/lib/kubelet/pods/b58df85c-1bf7-41ba-9839-d74172783a24/volumes" Mar 20 13:54:34 crc kubenswrapper[4690]: I0320 13:54:34.433093 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d7435c23-ad0c-484c-bc24-5cceb7e01ccc" path="/var/lib/kubelet/pods/d7435c23-ad0c-484c-bc24-5cceb7e01ccc/volumes" Mar 20 13:54:40 crc kubenswrapper[4690]: I0320 13:54:40.414768 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:54:40 crc kubenswrapper[4690]: E0320 13:54:40.416076 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:54:55 crc kubenswrapper[4690]: I0320 13:54:55.250018 4690 scope.go:117] "RemoveContainer" containerID="b7a94a9c4e2cbee80df5b492c305daaf930a8bb205ac42b2a2d81272b77cf189" Mar 20 13:54:55 crc kubenswrapper[4690]: I0320 13:54:55.294949 4690 scope.go:117] "RemoveContainer" containerID="96e277a9a8ed9b1d9afe8ddfcc42cd0e56fcfe2f4e1645e186d93eaa68185fde" Mar 20 13:54:55 crc kubenswrapper[4690]: I0320 13:54:55.399959 4690 scope.go:117] "RemoveContainer" containerID="788a6967c75f9832550ea8889692f14c85f612ff484fec3a904580e18e06beb1" Mar 20 13:54:55 crc kubenswrapper[4690]: I0320 13:54:55.414914 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:54:55 crc kubenswrapper[4690]: E0320 13:54:55.415331 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:54:55 crc kubenswrapper[4690]: I0320 13:54:55.430910 4690 scope.go:117] "RemoveContainer" containerID="3aa218be34916d51eb763aaf84856395e4fbc132ed761cd58796e55dbefd4b25" Mar 20 13:54:55 crc kubenswrapper[4690]: I0320 13:54:55.470691 4690 scope.go:117] "RemoveContainer" containerID="94cdd72e96f87b4b2ec8d1c9b4e7ef5245d3914543d084cb1717fc341d41f9af" Mar 20 13:54:55 crc kubenswrapper[4690]: I0320 13:54:55.508152 4690 scope.go:117] "RemoveContainer" containerID="40641d2f53160702c35271b9e674c92dc2503c78579cb86299d72e4cbc4b112d" Mar 20 13:54:55 crc kubenswrapper[4690]: I0320 13:54:55.556584 4690 scope.go:117] "RemoveContainer" containerID="f6f68c193d54c40052e78503db7d7888563cd274fa14af6f0f4e7ee67f2589e2" Mar 20 13:55:01 crc kubenswrapper[4690]: I0320 13:55:01.047465 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fvfgz"] Mar 20 13:55:01 crc kubenswrapper[4690]: I0320 13:55:01.055127 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fvfgz"] Mar 20 13:55:02 crc kubenswrapper[4690]: I0320 13:55:02.428887 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ecaabd8-5cb5-4e0f-b5c8-c73075e68880" path="/var/lib/kubelet/pods/8ecaabd8-5cb5-4e0f-b5c8-c73075e68880/volumes" Mar 20 13:55:06 crc kubenswrapper[4690]: I0320 13:55:06.415485 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:55:06 crc kubenswrapper[4690]: E0320 13:55:06.416953 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:55:19 crc kubenswrapper[4690]: I0320 13:55:19.414564 4690 scope.go:117] "RemoveContainer" 
containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:55:19 crc kubenswrapper[4690]: E0320 13:55:19.415968 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:55:22 crc kubenswrapper[4690]: I0320 13:55:22.039247 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-sb7x2"] Mar 20 13:55:22 crc kubenswrapper[4690]: I0320 13:55:22.053965 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-sb7x2"] Mar 20 13:55:22 crc kubenswrapper[4690]: I0320 13:55:22.435400 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69168ed4-2cdf-4be8-8ae0-917d89a54670" path="/var/lib/kubelet/pods/69168ed4-2cdf-4be8-8ae0-917d89a54670/volumes" Mar 20 13:55:23 crc kubenswrapper[4690]: I0320 13:55:23.051577 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pr965"] Mar 20 13:55:23 crc kubenswrapper[4690]: I0320 13:55:23.062362 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-pr965"] Mar 20 13:55:24 crc kubenswrapper[4690]: I0320 13:55:24.437699 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f50d733d-5439-49fc-af1b-bb36c5b3c739" path="/var/lib/kubelet/pods/f50d733d-5439-49fc-af1b-bb36c5b3c739/volumes" Mar 20 13:55:31 crc kubenswrapper[4690]: I0320 13:55:31.415180 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:55:31 crc kubenswrapper[4690]: E0320 13:55:31.415953 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:55:45 crc kubenswrapper[4690]: I0320 13:55:45.415296 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:55:45 crc kubenswrapper[4690]: E0320 13:55:45.416653 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:55:55 crc kubenswrapper[4690]: I0320 13:55:55.692723 4690 scope.go:117] "RemoveContainer" containerID="f3edb3590e62d270cce5c3faa54306f716894ae1c8e4f9c6f5670ef8c23020e4" Mar 20 13:55:55 crc kubenswrapper[4690]: I0320 13:55:55.756998 4690 scope.go:117] "RemoveContainer" containerID="1d90b4a3dcc4853a2310f0bd033a3c18fd91aeb397d8ac19266e713093dc867b" Mar 20 13:55:55 crc kubenswrapper[4690]: I0320 13:55:55.821519 4690 scope.go:117] "RemoveContainer" 
containerID="7c70b70602baf750e77d197fee8c81b82c32f4e3bb0697171ced29df5fe816d9" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.157270 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566916-s5mwg"] Mar 20 13:56:00 crc kubenswrapper[4690]: E0320 13:56:00.158119 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87deed05-0ddc-4abb-a022-361316422072" containerName="oc" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.158135 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="87deed05-0ddc-4abb-a022-361316422072" containerName="oc" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.158360 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="87deed05-0ddc-4abb-a022-361316422072" containerName="oc" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.159017 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566916-s5mwg" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.162622 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.163553 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.163962 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.182380 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566916-s5mwg"] Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.348587 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd4fk\" (UniqueName: \"kubernetes.io/projected/cb269586-b6ee-47bc-b9a6-bcbb59208660-kube-api-access-sd4fk\") pod \"auto-csr-approver-29566916-s5mwg\" (UID: \"cb269586-b6ee-47bc-b9a6-bcbb59208660\") " pod="openshift-infra/auto-csr-approver-29566916-s5mwg" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.415150 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:56:00 crc kubenswrapper[4690]: E0320 13:56:00.415567 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.451600 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sd4fk\" (UniqueName: \"kubernetes.io/projected/cb269586-b6ee-47bc-b9a6-bcbb59208660-kube-api-access-sd4fk\") pod \"auto-csr-approver-29566916-s5mwg\" (UID: \"cb269586-b6ee-47bc-b9a6-bcbb59208660\") " pod="openshift-infra/auto-csr-approver-29566916-s5mwg" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.476437 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd4fk\" (UniqueName: \"kubernetes.io/projected/cb269586-b6ee-47bc-b9a6-bcbb59208660-kube-api-access-sd4fk\") pod \"auto-csr-approver-29566916-s5mwg\" (UID: 
\"cb269586-b6ee-47bc-b9a6-bcbb59208660\") " pod="openshift-infra/auto-csr-approver-29566916-s5mwg" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.490879 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566916-s5mwg" Mar 20 13:56:00 crc kubenswrapper[4690]: I0320 13:56:00.929243 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566916-s5mwg"] Mar 20 13:56:01 crc kubenswrapper[4690]: I0320 13:56:01.883687 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566916-s5mwg" event={"ID":"cb269586-b6ee-47bc-b9a6-bcbb59208660","Type":"ContainerStarted","Data":"fe47decf04d203ae0ef65708b86543cfa1c3e6d5d62b797ee83b1d8f7258ce18"} Mar 20 13:56:02 crc kubenswrapper[4690]: I0320 13:56:02.891506 4690 generic.go:334] "Generic (PLEG): container finished" podID="cb269586-b6ee-47bc-b9a6-bcbb59208660" containerID="3d33265736a1a2cfcd40ffe7af56dd53534187e4cabadf5a2b7530f7d84f58c7" exitCode=0 Mar 20 13:56:02 crc kubenswrapper[4690]: I0320 13:56:02.891576 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566916-s5mwg" event={"ID":"cb269586-b6ee-47bc-b9a6-bcbb59208660","Type":"ContainerDied","Data":"3d33265736a1a2cfcd40ffe7af56dd53534187e4cabadf5a2b7530f7d84f58c7"} Mar 20 13:56:04 crc kubenswrapper[4690]: I0320 13:56:04.394707 4690 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566916-s5mwg" Mar 20 13:56:04 crc kubenswrapper[4690]: I0320 13:56:04.533631 4690 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sd4fk\" (UniqueName: \"kubernetes.io/projected/cb269586-b6ee-47bc-b9a6-bcbb59208660-kube-api-access-sd4fk\") pod \"cb269586-b6ee-47bc-b9a6-bcbb59208660\" (UID: \"cb269586-b6ee-47bc-b9a6-bcbb59208660\") " Mar 20 13:56:04 crc kubenswrapper[4690]: I0320 13:56:04.542077 4690 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb269586-b6ee-47bc-b9a6-bcbb59208660-kube-api-access-sd4fk" (OuterVolumeSpecName: "kube-api-access-sd4fk") pod "cb269586-b6ee-47bc-b9a6-bcbb59208660" (UID: "cb269586-b6ee-47bc-b9a6-bcbb59208660"). InnerVolumeSpecName "kube-api-access-sd4fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Mar 20 13:56:04 crc kubenswrapper[4690]: I0320 13:56:04.635986 4690 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sd4fk\" (UniqueName: \"kubernetes.io/projected/cb269586-b6ee-47bc-b9a6-bcbb59208660-kube-api-access-sd4fk\") on node \"crc\" DevicePath \"\"" Mar 20 13:56:04 crc kubenswrapper[4690]: I0320 13:56:04.914513 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-infra/auto-csr-approver-29566916-s5mwg" event={"ID":"cb269586-b6ee-47bc-b9a6-bcbb59208660","Type":"ContainerDied","Data":"fe47decf04d203ae0ef65708b86543cfa1c3e6d5d62b797ee83b1d8f7258ce18"} Mar 20 13:56:04 crc kubenswrapper[4690]: I0320 13:56:04.914561 4690 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe47decf04d203ae0ef65708b86543cfa1c3e6d5d62b797ee83b1d8f7258ce18" Mar 20 13:56:04 crc kubenswrapper[4690]: I0320 13:56:04.914712 4690 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566916-s5mwg" Mar 20 13:56:05 crc kubenswrapper[4690]: I0320 13:56:05.490256 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-infra/auto-csr-approver-29566910-mnbvl"] Mar 20 13:56:05 crc kubenswrapper[4690]: I0320 13:56:05.504367 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-infra/auto-csr-approver-29566910-mnbvl"] Mar 20 13:56:06 crc kubenswrapper[4690]: I0320 13:56:06.426553 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15efb6c7-12eb-4214-a3da-83604ed317c1" path="/var/lib/kubelet/pods/15efb6c7-12eb-4214-a3da-83604ed317c1/volumes" Mar 20 13:56:08 crc kubenswrapper[4690]: I0320 13:56:08.040759 4690 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-fmsn9"] Mar 20 13:56:08 crc kubenswrapper[4690]: I0320 13:56:08.071295 4690 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-fmsn9"] Mar 20 13:56:08 crc kubenswrapper[4690]: I0320 13:56:08.432935 4690 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce04bc8c-c482-4ff3-a0b5-303db0874640" path="/var/lib/kubelet/pods/ce04bc8c-c482-4ff3-a0b5-303db0874640/volumes" Mar 20 13:56:13 crc kubenswrapper[4690]: I0320 13:56:13.414810 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:56:13 crc kubenswrapper[4690]: E0320 13:56:13.416367 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:56:25 crc kubenswrapper[4690]: I0320 13:56:25.415103 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:56:25 crc kubenswrapper[4690]: E0320 13:56:25.416043 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:56:39 crc kubenswrapper[4690]: I0320 13:56:39.414578 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:56:39 crc kubenswrapper[4690]: E0320 13:56:39.415592 4690 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:56:53 crc kubenswrapper[4690]: I0320 13:56:53.415166 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:56:53 crc kubenswrapper[4690]: E0320 13:56:53.416544 4690 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-ftcqx_openshift-machine-config-operator(60ded650-b298-4115-8286-8969b94d4062)\"" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" podUID="60ded650-b298-4115-8286-8969b94d4062" Mar 20 13:56:55 crc kubenswrapper[4690]: I0320 13:56:55.941758 4690 scope.go:117] "RemoveContainer" containerID="140ac62382e0552785029f1f967e060e15393b0ada75296d7b0ad62b768a50b9" Mar 20 13:56:55 crc kubenswrapper[4690]: I0320 13:56:55.993377 4690 scope.go:117] "RemoveContainer" containerID="20ae55ebe3c5a3340627432d80cf0dfea2ab61caa9146b255dfd82709f600489" Mar 20 13:57:05 crc kubenswrapper[4690]: I0320 13:57:05.414741 4690 scope.go:117] "RemoveContainer" containerID="fbc28986138a37490ee36bcc83eb1787494ecf94dc0b9ee0ab77c7f335569651" Mar 20 13:57:06 crc kubenswrapper[4690]: I0320 13:57:06.583236 4690 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ftcqx" event={"ID":"60ded650-b298-4115-8286-8969b94d4062","Type":"ContainerStarted","Data":"4932e2cc623d51eaf14e364f9428f5c612a2d34aaf2f95de8e746024bf37dd11"} Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.163640 4690 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-infra/auto-csr-approver-29566918-66524"] Mar 20 13:58:00 crc kubenswrapper[4690]: E0320 13:58:00.164706 4690 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb269586-b6ee-47bc-b9a6-bcbb59208660" containerName="oc" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.164723 4690 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb269586-b6ee-47bc-b9a6-bcbb59208660" containerName="oc" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.165034 4690 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb269586-b6ee-47bc-b9a6-bcbb59208660" containerName="oc" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.165753 4690 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-infra/auto-csr-approver-29566918-66524" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.169299 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"kube-root-ca.crt" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.170138 4690 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-infra"/"openshift-service-ca.crt" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.170989 4690 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-infra"/"csr-approver-sa-dockercfg-n6wtc" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.175956 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566918-66524"] Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.319687 4690 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8c58\" (UniqueName: \"kubernetes.io/projected/10568f58-0c41-48a7-941b-9b81eadfcc09-kube-api-access-d8c58\") pod \"auto-csr-approver-29566918-66524\" (UID: \"10568f58-0c41-48a7-941b-9b81eadfcc09\") " pod="openshift-infra/auto-csr-approver-29566918-66524" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.421536 4690 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8c58\" (UniqueName: \"kubernetes.io/projected/10568f58-0c41-48a7-941b-9b81eadfcc09-kube-api-access-d8c58\") pod \"auto-csr-approver-29566918-66524\" (UID: \"10568f58-0c41-48a7-941b-9b81eadfcc09\") " pod="openshift-infra/auto-csr-approver-29566918-66524" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.446282 4690 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8c58\" (UniqueName: \"kubernetes.io/projected/10568f58-0c41-48a7-941b-9b81eadfcc09-kube-api-access-d8c58\") pod \"auto-csr-approver-29566918-66524\" (UID: \"10568f58-0c41-48a7-941b-9b81eadfcc09\") " pod="openshift-infra/auto-csr-approver-29566918-66524" Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.504583 4690 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-infra/auto-csr-approver-29566918-66524" Mar 20 13:58:00 crc kubenswrapper[4690]: W0320 13:58:00.953449 4690 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod10568f58_0c41_48a7_941b_9b81eadfcc09.slice/crio-d2ec8df844298fa17953ee3ff155a3efacb816ddb0d410fdc480cf277db1e1da WatchSource:0}: Error finding container d2ec8df844298fa17953ee3ff155a3efacb816ddb0d410fdc480cf277db1e1da: Status 404 returned error can't find the container with id d2ec8df844298fa17953ee3ff155a3efacb816ddb0d410fdc480cf277db1e1da Mar 20 13:58:00 crc kubenswrapper[4690]: I0320 13:58:00.962290 4690 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-infra/auto-csr-approver-29566918-66524"] var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515157251162024452 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015157251163017370 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015157244517016520 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015157244517015470 5ustar corecore